
Commit 03ebfb7

chore(version): bump version to 0.11.2
Parent: 510bd1d

File tree: 3 files changed (+31, -12 lines)


flowllm/op/crawl/crawl4ai_op.py

Lines changed: 20 additions & 9 deletions
@@ -1,6 +1,6 @@
 import asyncio
+from typing import TYPE_CHECKING

-from crawl4ai import BrowserConfig, CrawlerRunConfig, CacheMode, AsyncWebCrawler
 from loguru import logger

 from flowllm.context.flow_context import FlowContext
@@ -9,6 +9,9 @@
 from flowllm.schema.tool_call import ToolCall
 from flowllm.utils.web_utils import get_random_user_agent

+if TYPE_CHECKING:
+    from crawl4ai import BrowserConfig, CrawlerRunConfig, AsyncWebCrawler
+

 @C.register_op(register_app="FlowLLM")
 class Crawl4aiOp(BaseAsyncToolOp):
@@ -24,14 +27,8 @@ def __init__(self,
                          **kwargs)

         self.max_content_len: int = max_content_len
-
-        self.browser_config = BrowserConfig(headless=True,
-                                            java_script_enabled=True,
-                                            user_agent=get_random_user_agent(),
-                                            viewport={"width": 1280, "height": 800},
-                                            verbose=True)
-
-        self.crawler_config = CrawlerRunConfig(cache_mode=CacheMode.BYPASS, verbose=True)
+        self.browser_config = None
+        self.crawler_config = None

     def build_tool_call(self) -> ToolCall:
         return ToolCall(**{
@@ -46,6 +43,9 @@ def build_tool_call(self) -> ToolCall:
         })

     async def async_execute(self):
+        # Lazy import crawl4ai only when actually needed
+        from crawl4ai import BrowserConfig, CrawlerRunConfig, CacheMode, AsyncWebCrawler
+
         url: str = self.input_dict["url"]

         if self.enable_cache:
@@ -54,6 +54,17 @@ async def async_execute(self):
             self.set_result(cached_result["response_content"])
             return

+        # Initialize configs lazily
+        self.browser_config = BrowserConfig(
+            headless=True,
+            java_script_enabled=True,
+            user_agent=get_random_user_agent(),
+            viewport={"width": 1280, "height": 800},
+            verbose=True
+        )
+
+        self.crawler_config = CrawlerRunConfig(cache_mode=CacheMode.BYPASS, verbose=True)
+
         async with AsyncWebCrawler(config=self.browser_config) as crawler:
             result = await crawler.arun(url=url, config=self.crawler_config)
             response_content = result.markdown[:self.max_content_len]
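For context on the crawl4ai change: the module-level import is replaced by a `TYPE_CHECKING` guard (so annotations and editors still resolve the names) plus a runtime import inside `async_execute`, and the browser/crawler configs are built only when a crawl actually runs. A minimal sketch of that pattern, using illustrative names rather than the actual FlowLLM classes:

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen only by type checkers (mypy, pyright); never executed at runtime.
    from crawl4ai import AsyncWebCrawler, BrowserConfig, CrawlerRunConfig


class LazyCrawler:
    """Illustrative wrapper (not the FlowLLM op): defers crawl4ai to first use."""

    def __init__(self) -> None:
        # No crawl4ai objects yet; importing this module stays cheap.
        self.browser_config: "BrowserConfig | None" = None
        self.crawler_config: "CrawlerRunConfig | None" = None

    async def fetch_markdown(self, url: str) -> str:
        # Runtime import: the dependency is only required when a crawl happens.
        from crawl4ai import AsyncWebCrawler, BrowserConfig, CacheMode, CrawlerRunConfig

        self.browser_config = BrowserConfig(headless=True)
        self.crawler_config = CrawlerRunConfig(cache_mode=CacheMode.BYPASS)

        async with AsyncWebCrawler(config=self.browser_config) as crawler:
            result = await crawler.arun(url=url, config=self.crawler_config)
            return result.markdown
```

The trade-off: `import flowllm` stays fast and crawl4ai can remain an optional extra, but a missing dependency only surfaces when the op first executes.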

flowllm/op/fin_supply/akshare_op.py

Lines changed: 5 additions & 2 deletions
@@ -2,12 +2,14 @@
 import json
 import sys
 from io import StringIO
-from typing import Optional
+from typing import Optional, TYPE_CHECKING

-import akshare as ak
 import pandas as pd
 from loguru import logger

+if TYPE_CHECKING:
+    import akshare as ak
+
 from flowllm.context import FlowContext
 from flowllm.context.service_context import C
 from flowllm.enumeration.role import Role
@@ -40,6 +42,7 @@ def build_tool_call(self) -> ToolCall:

     @staticmethod
     def download_a_stock_df():
+        import akshare as ak
         stock_sh_a_spot_em_df = ak.stock_sh_a_spot_em()
         stock_sz_a_spot_em_df = ak.stock_sz_a_spot_em()
         stock_bj_a_spot_em_df = ak.stock_bj_a_spot_em()
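The akshare edit applies the same recipe on a smaller scale: pandas stays a hard, eagerly imported dependency, while akshare is imported only inside the one method that calls it (plus a `TYPE_CHECKING` import for annotations). A sketch of that shape, with an illustrative class name and a final `concat` step that is an assumption, not part of the diff:

```python
import pandas as pd  # hard dependency: imported eagerly at module load


class AStockQuotes:
    """Illustrative holder; only the method that needs akshare pays its import cost."""

    @staticmethod
    def download_a_stock_df() -> pd.DataFrame:
        import akshare as ak  # deferred: akshare is an optional `fin` dependency

        sh = ak.stock_sh_a_spot_em()  # Shanghai A-share spot quotes
        sz = ak.stock_sz_a_spot_em()  # Shenzhen A-share spot quotes
        bj = ak.stock_bj_a_spot_em()  # Beijing A-share spot quotes
        # Combining the three exchanges is an assumption for illustration.
        return pd.concat([sh, sz, bj], ignore_index=True)
```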

pyproject.toml

Lines changed: 6 additions & 1 deletion
@@ -48,6 +48,7 @@ dependencies = [
     "uvicorn[standard]>=0.23.0",
     "pyfiglet>=0.8.0",
     "rich>=13.5.0",
+    "tavily-python>=0.2.0",
 ]

 [project.optional-dependencies]
@@ -56,7 +57,7 @@ reme = ["elasticsearch>=8.10.0", "chromadb>=0.4.0"]

 ray = ["ray>=2.6.0", "scikit-learn>=1.3.0"]

-fin = ["akshare>=1.9.0", "crawl4ai>=0.7.4", "tavily-python>=0.2.0"]
+fin = ["akshare>=1.9.0", "crawl4ai>=0.7.4"]

 token = ["modelscope>=1.10.0", "diffusers>=0.21.0", "peft>=0.6.0", "transformers>=4.35.0", "tiktoken>=0.5.0"]

@@ -74,3 +75,7 @@ flowllm = [

 [project.scripts]
 flowllm = "flowllm.app:main"
+
+# conda create -n fl_test python=3.12
+# conda activate fl_test
+# conda env remove -n fl_test
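On the pyproject.toml side, `tavily-python` moves from the `fin` extra into the core dependencies, while `akshare` and `crawl4ai` stay optional; the lazy imports above are what keep the package importable when the `fin` extra is not installed. A small, illustrative availability check (helper name and install hint are assumptions, not part of the commit):

```python
import importlib.util


def fin_extras_available() -> bool:
    """Illustrative check that the optional `fin` dependencies are importable."""
    return all(
        importlib.util.find_spec(name) is not None
        for name in ("akshare", "crawl4ai")
    )


if not fin_extras_available():
    # The `fin` extra declared in pyproject.toml pulls in akshare and crawl4ai,
    # e.g. `pip install "flowllm[fin]"` (exact command depends on how the
    # package is installed/distributed).
    print("Optional `fin` dependencies missing; crawl/akshare ops will fail at runtime.")
```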
