Add ruff rules for bandit (S)

cbornet committed Oct 11, 2024
1 parent 2adda78 commit 5bd6bfc
Showing 26 changed files with 71 additions and 36 deletions.
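Most of the changes below apply a handful of recurring fixes driven by the newly enabled bandit (S) rules: HTTP requests gain an explicit timeout (S113), bare assert statements become explicit raises so the checks survive python -O (S101), yaml.load(file, Loader=yaml.FullLoader) becomes the equivalent yaml.full_load(file), and the remaining findings (eval, exec, pickle, hardcoded-password false positives) are suppressed with targeted # noqa comments or per-file ignores in pyproject.toml. A minimal sketch of the first three patterns, using hypothetical fetch_model_ids and load_spec helpers and a placeholder URL purely for illustration:

import requests
import yaml

PRICES_URL = "https://example.invalid/model_prices.json"  # placeholder URL, not from this repo


def fetch_model_ids() -> list[str]:
    # S113: pass an explicit timeout so a stalled server cannot hang the call forever.
    response = requests.get(PRICES_URL, timeout=10)
    response.raise_for_status()
    payload = response.json()

    # S101: raise instead of assert, since asserts are stripped under `python -O`.
    if "data" not in payload:
        msg = "Expected a 'data' key in the response"
        raise ValueError(msg)
    return [model["id"] for model in payload["data"]]


def load_spec(path: str) -> dict:
    # yaml.full_load(f) is equivalent to yaml.load(f, Loader=yaml.FullLoader).
    with open(path) as file:
        return yaml.full_load(file) or {}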
2 changes: 1 addition & 1 deletion src/backend/base/langflow/base/astra_assistants/util.py
@@ -24,7 +24,7 @@ def get_patched_openai_client(shared_component_cache):


url = "https://raw.githubusercontent.com/BerriAI/litellm/refs/heads/main/model_prices_and_context_window.json"
-response = requests.get(url)
+response = requests.get(url, timeout=10)
data = json.loads(response.text)

# Extract the model names into a Python list
6 changes: 3 additions & 3 deletions src/backend/base/langflow/base/curl/parse.py
@@ -54,7 +54,7 @@ def normalize_newlines(multiline_text):

def parse_curl_command(curl_command):
tokens = shlex.split(normalize_newlines(curl_command))
-tokens = [token for token in tokens if token and token != " "]
+tokens = [token for token in tokens if token and token != " "] # noqa: S105
if tokens and "curl" not in tokens[0]:
msg = "Invalid curl command"
raise ValueError(msg)
@@ -79,7 +79,7 @@ def parse_curl_command(curl_command):
i = 0
while i < len(tokens):
token = tokens[i]
-if token == "-X":
+if token == "-X": # noqa: S105
i += 1
args["method"] = tokens[i].lower()
method_on_curl = tokens[i].lower()
@@ -92,7 +92,7 @@ def parse_curl_command(curl_command):
elif token in ("-H", "--header"):
i += 1
args["headers"].append(tokens[i])
-elif token == "--compressed":
+elif token == "--compressed": # noqa: S105
args["compressed"] = True
elif token in ("-k", "--insecure"):
args["insecure"] = True
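The # noqa: S105 comments above suppress bandit's hardcoded-password-string check, which keys off variable names such as password, secret, or token; here the variable is a shell token being compared against curl flags, so the findings are false positives. A hypothetical minimal illustration of the pattern the rule reacts to, assuming ruff with the S rules enabled:

# S105 fires because the variable is named "token" and is compared to a string
# literal; the value is a curl flag, not a credential, hence the suppression.
token = "-X"  # noqa: S105
if token == "-X":  # noqa: S105
    print("explicit HTTP method flag")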
2 changes: 1 addition & 1 deletion src/backend/base/langflow/base/data/utils.py
@@ -143,7 +143,7 @@ def parse_text_file_to_data(file_path: str, silent_errors: bool) -> Data | None:
elif file_path.endswith((".yaml", ".yml")):
text = yaml.safe_load(text)
elif file_path.endswith(".xml"):
-xml_element = ET.fromstring(text)
+xml_element = ET.fromstring(text) # noqa: S314
text = ET.tostring(xml_element, encoding="unicode")
except Exception as e:
if not silent_errors:
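The # noqa: S314 above silences the warning that xml.etree.ElementTree parses XML without safeguards against maliciously crafted input (for example entity-expansion tricks). One commonly suggested alternative, not used in this commit, is defusedxml, which offers a near drop-in fromstring; a minimal sketch assuming defusedxml is installed:

from defusedxml import ElementTree as DET  # third-party: pip install defusedxml

# Parses like the stdlib version but rejects entity declarations and
# external references by default.
xml_element = DET.fromstring("<root><item>hello</item></root>")
print(xml_element.tag)  # "root"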
@@ -74,7 +74,7 @@ def _add_content_to_page(self, markdown_text: str, block_id: str) -> dict[str, A
"children": blocks,
}

-response = requests.patch(url, headers=headers, json=data)
+response = requests.patch(url, headers=headers, json=data, timeout=10)
response.raise_for_status()

return response.json()
2 changes: 1 addition & 1 deletion src/backend/base/langflow/components/Notion/create_page.py
@@ -81,7 +81,7 @@ def _create_notion_page(self, database_id: str, properties_json: str) -> dict[st
}

try:
-response = requests.post("https://api.notion.com/v1/pages", headers=headers, json=data)
+response = requests.post("https://api.notion.com/v1/pages", headers=headers, json=data, timeout=10)
response.raise_for_status()
return response.json()
except requests.exceptions.RequestException as e:
@@ -55,7 +55,7 @@ def _fetch_database_properties(self, database_id: str) -> dict | str:
"Notion-Version": "2022-06-28", # Use the latest supported version
}
try:
-response = requests.get(url, headers=headers)
+response = requests.get(url, headers=headers, timeout=10)
response.raise_for_status()
data = response.json()
return data.get("properties", {})
2 changes: 1 addition & 1 deletion src/backend/base/langflow/components/Notion/list_pages.py
@@ -109,7 +109,7 @@ def _query_notion_database(self, database_id: str, query_json: str | None = None
return f"Invalid JSON format for query: {e}"

try:
-response = requests.post(url, headers=headers, json=query_payload)
+response = requests.post(url, headers=headers, json=query_payload, timeout=10)
response.raise_for_status()
results = response.json()
return results["results"]
2 changes: 1 addition & 1 deletion src/backend/base/langflow/components/Notion/list_users.py
@@ -58,7 +58,7 @@ def _list_users(self) -> list[dict]:
"Notion-Version": "2022-06-28",
}

-response = requests.get(url, headers=headers)
+response = requests.get(url, headers=headers, timeout=10)
response.raise_for_status()

data = response.json()
@@ -55,7 +55,7 @@ def _retrieve_page_content(self, page_id: str) -> str:
"Notion-Version": "2022-06-28",
}
try:
-blocks_response = requests.get(blocks_url, headers=headers)
+blocks_response = requests.get(blocks_url, headers=headers, timeout=10)
blocks_response.raise_for_status()
blocks_data = blocks_response.json()
return self.parse_blocks(blocks_data.get("results", []))
2 changes: 1 addition & 1 deletion src/backend/base/langflow/components/Notion/search.py
@@ -104,7 +104,7 @@ def _search_notion(
"sort": {"direction": sort_direction, "timestamp": "last_edited_time"},
}

-response = requests.post(url, headers=headers, json=data)
+response = requests.post(url, headers=headers, json=data, timeout=10)
response.raise_for_status()

results = response.json()
@@ -87,7 +87,7 @@ def _update_notion_page(self, page_id: str, properties: str | dict[str, Any]) ->

try:
logger.info(f"Sending request to Notion API: URL: {url}, Data: {json.dumps(data)}")
-response = requests.patch(url, headers=headers, json=data)
+response = requests.patch(url, headers=headers, json=data, timeout=10)
response.raise_for_status()
updated_page = response.json()

2 changes: 1 addition & 1 deletion src/backend/base/langflow/components/agents/JsonAgent.py
@@ -25,7 +25,7 @@ def build_agent(self) -> AgentExecutor:
path = Path(self.path)
if self.path.endswith("yaml") or self.path.endswith("yml"):
with path.open() as file:
-yaml_dict = yaml.load(file, Loader=yaml.FullLoader)
+yaml_dict = yaml.full_load(file)
spec = JsonSpec(dict_=yaml_dict)
else:
spec = JsonSpec.from_file(path)
@@ -27,7 +27,7 @@ def build_agent(self) -> AgentExecutor:
path = Path(self.path)
if self.path.endswith("yaml") or self.path.endswith("yml"):
with path.open() as file:
-yaml_dict = yaml.load(file, Loader=yaml.FullLoader)
+yaml_dict = yaml.full_load(file)
spec = JsonSpec(dict_=yaml_dict)
else:
spec = JsonSpec.from_file(path)
4 changes: 3 additions & 1 deletion src/backend/base/langflow/components/data/GoogleDrive.py
@@ -81,7 +81,9 @@ class Config:
msg = f"Error loading documents: {e}"
raise ValueError(msg) from e

-assert len(docs) == 1, "Expected a single document to be loaded."
+if len(docs) != 1:
+msg = "Expected a single document to be loaded."
+raise ValueError(msg)

data = docs_to_data(docs)
# Return the loaded documents
@@ -34,7 +34,9 @@ def compute_similarity(self) -> Data:
embedding_vectors: list[Data] = self.embedding_vectors

# Assert that the list contains exactly two Data objects
-assert len(embedding_vectors) == 2, "Exactly two embedding vectors are required." # noqa: PLR2004
+if len(embedding_vectors) != 2: # noqa: PLR2004
+msg = "Exactly two embedding vectors are required."
+raise ValueError(msg)

embedding_1 = np.array(embedding_vectors[0].data["embeddings"])
embedding_2 = np.array(embedding_vectors[1].data["embeddings"])
@@ -29,7 +29,9 @@ def embed_documents(self, texts: list[str]) -> list[list[float]]:
for index, future in futures:
try:
result_data = future.result()
-assert len(result_data["data"]) == 1, "Expected one embedding"
+if len(result_data["data"]) != 1:
+msg = "Expected one embedding"
+raise ValueError(msg)
embeddings[index] = result_data["data"][0]["embedding"]
except (
httpx.HTTPStatusError,
2 changes: 1 addition & 1 deletion src/backend/base/langflow/components/models/GroqModel.py
@@ -67,7 +67,7 @@ def get_models(self) -> list[str]:
headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}

try:
-response = requests.get(url, headers=headers)
+response = requests.get(url, headers=headers, timeout=10)
response.raise_for_status()
model_list = response.json()
return [model["id"] for model in model_list.get("data", [])]
3 changes: 2 additions & 1 deletion src/backend/base/langflow/components/tools/SearXNGTool.py
@@ -61,7 +61,7 @@ def update_build_config(self, build_config: dotdict, field_value: Any, field_nam
try:
url = f"{field_value}/config"

-response = requests.get(url=url, headers=self.search_headers.copy())
+response = requests.get(url=url, headers=self.search_headers.copy(), timeout=10)
data = None
if response.headers.get("Content-Encoding") == "zstd":
data = json.loads(response.content)
@@ -105,6 +105,7 @@ def search(query: str, categories: Sequence[str] = ()) -> list:
"language": SearxSearch._language,
"format": "json",
},
+timeout=10,
).json()

num_results = min(SearxSearch._max_results, len(response["results"]))
@@ -176,7 +176,7 @@ def parse_callable_details(self, node: ast.FunctionDef) -> dict[str, Any]:

# Handle cases where the type is not found in the constructed environment
with contextlib.suppress(NameError):
-return_type = eval(return_type_str, eval_env)
+return_type = eval(return_type_str, eval_env) # noqa: S307

func = CallableCodeDetails(
name=node.name,
34 changes: 26 additions & 8 deletions src/backend/base/langflow/graph/graph/ascii.py
@@ -19,6 +19,8 @@
from grandalf.layouts import SugiyamaLayout
from grandalf.routing import EdgeViewer, route_with_lines

+MINIMUM_EDGE_VIEW_POINTS = 2


class VertexViewer:
"""Class to define vertex box boundaries that will be accounted for during graph building by grandalf."""
@@ -42,8 +44,12 @@ class AsciiCanvas:
"""Class for drawing in ASCII."""

def __init__(self, cols, lines):
-assert cols > 1
-assert lines > 1
+if cols <= 1:
+msg = "cols must be greater than 1"
+raise ValueError(msg)
+if lines <= 1:
+msg = "lines must be greater than 1"
+raise ValueError(msg)
self.cols = cols
self.lines = lines
self.canvas = [[" "] * cols for _ in range(lines)]
@@ -61,9 +67,15 @@ def draw(self):

def point(self, x, y, char):
"""Create a point on ASCII canvas."""
-assert len(char) == 1
-assert 0 <= x < self.cols
-assert 0 <= y < self.lines
+if len(char) != 1:
+msg = "char must be a single character"
+raise ValueError(msg)
+if x < 0 or x >= self.cols:
+msg = "x is out of bounds"
+raise ValueError(msg)
+if y < 0 or y >= self.lines:
+msg = "y is out of bounds"
+raise ValueError(msg)
self.canvas[y][x] = char

def line(self, x0, y0, x1, y1, char):
@@ -93,8 +105,12 @@ def text(self, x, y, text):

def box(self, x0, y0, width, height):
"""Create a box on ASCII canvas."""
-assert width > 1
-assert height > 1
+if width <= 1:
+msg = "width must be greater than 1"
+raise ValueError(msg)
+if height <= 1:
+msg = "height must be greater than 1"
+raise ValueError(msg)
width -= 1
height -= 1

@@ -162,7 +178,9 @@ def draw_graph(vertexes, edges, return_ascii=True):
canvas = AsciiCanvas(canvas_cols, canvas_lines)

for edge in sug.g.sE:
-assert len(edge.view._pts) > 1
+if len(edge.view._pts) < MINIMUM_EDGE_VIEW_POINTS:
+msg = "edge.view._pts must have at least 2 points"
+raise ValueError(msg)
for index in range(1, len(edge.view._pts)):
start = edge.view._pts[index - 1]
end = edge.view._pts[index]
2 changes: 1 addition & 1 deletion src/backend/base/langflow/helpers/flow.py
@@ -194,7 +194,7 @@ async def flow_function({func_args}):

compiled_func = compile(func_body, "<string>", "exec")
local_scope: dict = {}
-exec(compiled_func, globals(), local_scope)
+exec(compiled_func, globals(), local_scope) # noqa: S102
return local_scope["flow_function"]


2 changes: 1 addition & 1 deletion src/backend/base/langflow/inputs/input_mixin.py
@@ -18,7 +18,7 @@
class FieldTypes(str, Enum):
TEXT = "str"
INTEGER = "int"
-PASSWORD = "str" # noqa: PIE796
+PASSWORD = "str" # noqa: PIE796, S105
FLOAT = "float"
BOOLEAN = "bool"
DICT = "dict"
2 changes: 1 addition & 1 deletion src/backend/base/langflow/io/schema.py
@@ -38,7 +38,7 @@ def create_input_schema(inputs: list["InputTypes"]) -> type[BaseModel]:
literal_string = f"Literal{input_model.options}"
# validate that the literal_string is a valid literal

-field_type = eval(literal_string, {"Literal": Literal})
+field_type = eval(literal_string, {"Literal": Literal}) # noqa: S307
if hasattr(input_model, "is_list") and input_model.is_list:
field_type = list[field_type] # type: ignore[valid-type]
if input_model.name:
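The two eval calls above keep inline # noqa: S307 suppressions because the evaluated strings are assembled internally (a Literal[...] type expression and a return-type name) and the globals handed to eval are limited to a purpose-built mapping rather than the full module namespace. A minimal sketch of that restricted-namespace pattern, with a hypothetical options list for illustration:

from typing import Literal

options = ["red", "green", "blue"]  # hypothetical options
literal_string = f"Literal{options}"  # -> "Literal['red', 'green', 'blue']"

# The globals mapping limits which names from the surrounding module the
# expression can see; S307 still flags eval itself, hence the suppression.
field_type = eval(literal_string, {"Literal": Literal})  # noqa: S307
print(field_type)  # typing.Literal['red', 'green', 'blue']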
2 changes: 1 addition & 1 deletion src/backend/base/langflow/services/settings/constants.py
@@ -1,5 +1,5 @@
DEFAULT_SUPERUSER = "langflow"
-DEFAULT_SUPERUSER_PASSWORD = "langflow"
+DEFAULT_SUPERUSER_PASSWORD = "langflow" # noqa: S105
VARIABLES_TO_GET_FROM_ENVIRONMENT = [
"OPENAI_API_KEY",
"ANTHROPIC_API_KEY",
@@ -25,7 +25,12 @@ def __init__(self, namespace: str = "langflow"):
# initialize the Kubernetes API client
self.core_api = client.CoreV1Api()

-def create_secret(self, name: str, data: dict, secret_type: str = "Opaque"):
+def create_secret(
+self,
+name: str,
+data: dict,
+secret_type: str = "Opaque", # noqa: S107
+):
"""
Create a new secret in the specified namespace.
@@ -46,7 +51,7 @@ def create_secret(self, name: str, data: dict, secret_type: str = "Opaque"):

return self.core_api.create_namespaced_secret(self.namespace, secret)

-def upsert_secret(self, secret_name: str, data: dict, secret_type: str = "Opaque"):
+def upsert_secret(self, secret_name: str, data: dict):
"""
Upsert a secret in the specified namespace.
If the secret doesn't exist, it will be created.
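The # noqa: S107 above covers bandit's hardcoded-password-default check, which flags string default values on parameters whose names look secret-related; "Opaque" is the standard Kubernetes secret type rather than a credential, so the finding is suppressed (and upsert_secret simply drops the parameter instead). A hypothetical minimal illustration:

# S107 fires because the parameter name contains "secret" and carries a string
# default; the value here is a Kubernetes secret *type*, not a password.
def create_secret(name: str, data: dict, secret_type: str = "Opaque"):  # noqa: S107
    print(f"would create a {secret_type} secret named {name!r} with {len(data)} keys")


create_secret("example", {"key": "value"})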
7 changes: 6 additions & 1 deletion src/backend/base/pyproject.toml
@@ -60,7 +60,6 @@ ignore = [
"DOC",
"FBT",
"N",
-"S",
"SLF",
"TRY3",
]
@@ -69,6 +68,12 @@ ignore = [
"langflow/api/v1/*" = [
"TCH", # FastAPI needs to evaluate types at runtime
]
+"langflow/{components/tools/PythonCodeStructuredTool.py,custom/code_parser/code_parser.py,utils/validate.py}" = [
+"S102", # Use of exec
+]
+"langflow/services/cache/*" = [
+"S301", # Use of pickle
+]

[tool.uv]
dev-dependencies = [