@@ -3,54 +3,54 @@ setup() {
 }
 
 @test "outputs the HTML with Star Wars products" {
-  run uv run --with httpx python lego.py
+  run uv run --with=httpx python lego.py
   [[ "$output" == *"Millennium Falcon"* ]]
 }
 
 @test "counts the number of F1 Academy teams" {
-  run uv run --with httpx --with beautifulsoup4 python f1academy_teams.py
+  run uv run --with=httpx --with=beautifulsoup4 python f1academy_teams.py
   (( status == 0 ))
   [[ -n "$output" ]]
 }
 
 @test "counts the number of F1 Academy drivers" {
-  run uv run --with httpx --with beautifulsoup4 python f1academy_drivers.py
+  run uv run --with=httpx --with=beautifulsoup4 python f1academy_drivers.py
   (( status == 0 ))
   [[ -n "$output" ]]
 }
 
 @test "lists African countries" {
-  run uv run --with httpx --with beautifulsoup4 python wikipedia_countries.py
+  run uv run --with=httpx --with=beautifulsoup4 python wikipedia_countries.py
   (( status == 0 ))
   [[ -n "$output" ]]
 }
 
 @test "lists African countries with a single selector" {
-  run uv run --with httpx --with beautifulsoup4 python wikipedia_countries_single_selector.py
+  run uv run --with=httpx --with=beautifulsoup4 python wikipedia_countries_single_selector.py
   (( status == 0 ))
   [[ -n "$output" ]]
 }
 
 @test "lists Guardian F1 article titles" {
-  run uv run --with httpx --with beautifulsoup4 python guardian_f1_titles.py
+  run uv run --with=httpx --with=beautifulsoup4 python guardian_f1_titles.py
   (( status == 0 ))
   [[ -n "$output" ]]
 }
 
 @test "prints warehouse stock counts" {
-  run uv run --with httpx --with beautifulsoup4 python warehouse_units.py
+  run uv run --with=httpx --with=beautifulsoup4 python warehouse_units.py
   (( status == 0 ))
   [[ -n "$output" ]]
 }
 
 @test "prints warehouse stock counts using regex" {
-  run uv run --with httpx --with beautifulsoup4 python warehouse_units_regex.py
+  run uv run --with=httpx --with=beautifulsoup4 python warehouse_units_regex.py
   (( status == 0 ))
   [[ -n "$output" ]]
 }
 
 @test "prints Guardian F1 titles with publish dates" {
-  run uv run --with httpx --with beautifulsoup4 python guardian_publish_dates.py
+  run uv run --with=httpx --with=beautifulsoup4 python guardian_publish_dates.py
   (( status == 0 ))
   [[ -n "$output" ]]
 }
@@ -62,51 +62,49 @@ setup() {
 }
 
 @test "lists Wikipedia country links" {
-  run uv run --with httpx --with beautifulsoup4 python wikipedia_country_links.py
+  run uv run --with=httpx --with=beautifulsoup4 python wikipedia_country_links.py
   (( status == 0 ))
   [[ -n "$output" ]]
 }
 
 @test "lists Guardian F1 article links" {
-  run uv run --with httpx --with beautifulsoup4 python guardian_f1_links.py
+  run uv run --with=httpx --with=beautifulsoup4 python guardian_f1_links.py
   (( status == 0 ))
   [[ -n "$output" ]]
 }
 
 @test "prints Wikipedia calling codes" {
-  run uv run --with httpx --with beautifulsoup4 python wikipedia_calling_codes.py
+  run uv run --with=httpx --with=beautifulsoup4 python wikipedia_calling_codes.py
   (( status == 0 ))
   [[ -n "$output" ]]
 }
 
 @test "lists Guardian F1 authors" {
-  run uv run --with httpx --with beautifulsoup4 python guardian_f1_authors.py
+  run uv run --with=httpx --with=beautifulsoup4 python guardian_f1_authors.py
   (( status == 0 ))
   [[ -n "$output" ]]
 }
 
 @test "lists Python database jobs" {
-  run uv run --with httpx --with beautifulsoup4 python python_jobs_database.py
+  run uv run --with=httpx --with=beautifulsoup4 python python_jobs_database.py
   (( status == 0 ))
   [[ -n "$output" ]]
 }
 
 @test "finds the shortest CNN sports article" {
-  run uv run --with httpx --with beautifulsoup4 python cnn_sports_shortest_article.py
+  run uv run --with=httpx --with=beautifulsoup4 python cnn_sports_shortest_article.py
   (( status == 0 ))
   [[ -n "$output" ]]
 }
 
 @test "scrapes F1 Academy driver details with Crawlee" {
-  run uv run --with httpx --with beautifulsoup4 --with crawlee python crawlee_f1_drivers.py
-  (( status == 0 ))
+  run uv run --with=crawlee[beautifulsoup] python crawlee_f1_drivers.py
   [[ -n "$output" || -f dataset.json ]]
   rm -f dataset.json
 }
 
 @test "scrapes Netflix ratings with Crawlee" {
-  run uv run --with httpx --with beautifulsoup4 --with crawlee python crawlee_netflix_ratings.py
-  (( status == 0 ))
+  run uv run --with=crawlee[beautifulsoup] python crawlee_netflix_ratings.py
   [[ -n "$output" || -f dataset.json ]]
   rm -f dataset.json
 }