1+ {
2+ "nbformat" : 4 ,
3+ "nbformat_minor" : 0 ,
4+ "metadata" : {
5+ "colab" : {
6+ "provenance" : []
7+ },
8+ "kernelspec" : {
9+ "name" : " python3" ,
10+ "display_name" : " Python 3"
11+ },
12+ "language_info" : {
13+ "name" : " python"
14+ }
15+ },
16+ "cells" : [
17+ {
18+ "cell_type" : " code" ,
19+ "source" : [
20+ " import sys, subprocess, os, textwrap, pathlib, json\n " ,
21+ " \n " ,
22+ " subprocess.run([sys.executable, \" -m\" , \" pip\" , \" install\" , \" -q\" , \" pytest>=8.0\" ], check=True)\n " ,
23+ " \n " ,
24+ " root = pathlib.Path(\" pytest_advanced_tutorial\" ).absolute()\n " ,
25+ " if root.exists():\n " ,
26+ " import shutil; shutil.rmtree(root)\n " ,
27+ " (root / \" calc\" ).mkdir(parents=True)\n " ,
28+ " (root / \" app\" ).mkdir()\n " ,
29+ " (root / \" tests\" ).mkdir()"
30+ ],
31+ "metadata" : {
32+ "id" : " lBU9mx7D_2m_"
33+ },
34+ "execution_count" : 4 ,
35+ "outputs" : []
36+ },
37+ {
38+ "cell_type" : " code" ,
39+ "source" : [
40+ " (root / \" pytest.ini\" ).write_text(textwrap.dedent(\"\"\"\n " ,
41+ " [pytest]\n " ,
42+ " addopts = -q -ra --maxfail=1 -m \" not slow\"\n " ,
43+ " testpaths = tests\n " ,
44+ " markers =\n " ,
45+ " slow: slow tests (use --runslow to run)\n " ,
46+ " io: tests hitting the file system\n " ,
47+ " api: tests patching external calls\n " ,
48+ " \"\"\" ).strip()+\"\\ n\" )\n " ,
49+ " \n " ,
50+ " (root / \" conftest.py\" ).write_text(textwrap.dedent(r'''\n " ,
51+ " import os, time, pytest, json\n " ,
52+ " def pytest_addoption(parser):\n " ,
53+ " parser.addoption(\" --runslow\" , action=\" store_true\" , help=\" run slow tests\" )\n " ,
54+ " def pytest_configure(config):\n " ,
55+ " config.addinivalue_line(\" markers\" , \" slow: slow tests\" )\n " ,
56+ " config._summary = {\" passed\" :0,\" failed\" :0,\" skipped\" :0,\" slow_ran\" :0}\n " ,
57+ " def pytest_collection_modifyitems(config, items):\n " ,
58+ " if config.getoption(\" --runslow\" ):\n " ,
59+ " return\n " ,
60+ " skip = pytest.mark.skip(reason=\" need --runslow to run\" )\n " ,
61+ " for item in items:\n " ,
62+ " if \" slow\" in item.keywords: item.add_marker(skip)\n " ,
63+ " def pytest_runtest_logreport(report):\n " ,
64+ " cfg = report.config._summary\n " ,
65+ " if report.when==\" call\" :\n " ,
66+ " if report.passed: cfg[\" passed\" ]+=1\n " ,
67+ " elif report.failed: cfg[\" failed\" ]+=1\n " ,
68+ " elif report.skipped: cfg[\" skipped\" ]+=1\n " ,
69+ " if \" slow\" in report.keywords and report.passed: cfg[\" slow_ran\" ]+=1\n " ,
70+ " def pytest_terminal_summary(terminalreporter, exitstatus, config):\n " ,
71+ " s=config._summary\n " ,
72+ " terminalreporter.write_sep(\" =\" , \" SESSION SUMMARY (custom plugin)\" )\n " ,
73+ " terminalreporter.write_line(f\" Passed: {s['passed']} | Failed: {s['failed']} | Skipped: {s['skipped']}\" )\n " ,
74+ " terminalreporter.write_line(f\" Slow tests run: {s['slow_ran']}\" )\n " ,
75+ " terminalreporter.write_line(\" PyTest finished successfully ✅\" if s[\" failed\" ]==0 else \" Some tests failed ❌\" )\n " ,
76+ " \n " ,
77+ " @pytest.fixture(scope=\" session\" )\n " ,
78+ " def settings(): return {\" env\" :\" prod\" ,\" max_retries\" :2}\n " ,
79+ " @pytest.fixture(scope=\" function\" )\n " ,
80+ " def event_log(): logs=[]; yield logs; print(\"\\\\ nEVENT LOG:\" , logs)\n " ,
81+ " @pytest.fixture\n " ,
82+ " def temp_json_file(tmp_path):\n " ,
83+ " p=tmp_path/\" data.json\" ; p.write_text('{\" msg\" :\" hi\" }'); return p\n " ,
84+ " @pytest.fixture\n " ,
85+ " def fake_clock(monkeypatch):\n " ,
86+ " t={\" now\" :1000.0}; monkeypatch.setattr(time,\" time\" ,lambda: t[\" now\" ]); return t\n " ,
87+ " '''))"
88+ ],
89+ "metadata" : {
90+ "colab" : {
91+ "base_uri" : " https://localhost:8080/"
92+ },
93+ "id" : " eXQt_rYN_6pz" ,
94+ "outputId" : " 505a2ef6-7ae5-4bc4-863a-bb832dbb39b0"
95+ },
96+ "execution_count" : 5 ,
97+ "outputs" : [
98+ {
99+ "output_type" : " execute_result" ,
100+ "data" : {
101+ "text/plain" : [
102+ " 1728"
103+ ]
104+ },
105+ "metadata" : {},
106+ "execution_count" : 5
107+ }
108+ ]
109+ },
110+ {
111+ "cell_type" : " code" ,
112+ "source" : [
113+ " (root/\" calc\" /\" __init__.py\" ).write_text(textwrap.dedent('''\n " ,
114+ " from .vector import Vector\n " ,
115+ " def add(a,b): return a+b\n " ,
116+ " def div(a,b):\n " ,
117+ " if b==0: raise ZeroDivisionError(\" division by zero\" )\n " ,
118+ " return a/b\n " ,
119+ " def moving_avg(xs,k):\n " ,
120+ " if k<=0 or k>len(xs): raise ValueError(\" bad window\" )\n " ,
121+ " out=[]; s=sum(xs[:k]); out.append(s/k)\n " ,
122+ " for i in range(k,len(xs)):\n " ,
123+ " s+=xs[i]-xs[i-k]; out.append(s/k)\n " ,
124+ " return out\n " ,
125+ " '''))\n " ,
126+ " \n " ,
127+ " (root/\" calc\" /\" vector.py\" ).write_text(textwrap.dedent('''\n " ,
128+ " class Vector:\n " ,
129+ " __slots__=(\" x\" ,\" y\" ,\" z\" )\n " ,
130+ " def __init__(self,x=0,y=0,z=0): self.x,self.y,self.z=float(x),float(y),float(z)\n " ,
131+ " def __add__(self,o): return Vector(self.x+o.x,self.y+o.y,self.z+o.z)\n " ,
132+ " def __sub__(self,o): return Vector(self.x-o.x,self.y-o.y,self.z-o.z)\n " ,
133+ " def __mul__(self,s): return Vector(self.x*s,self.y*s,self.z*s)\n " ,
134+ " __rmul__=__mul__\n " ,
135+ " def norm(self): return (self.x**2+self.y**2+self.z**2)**0.5\n " ,
136+ " def __eq__(self,o): return abs(self.x-o.x)<1e-9 and abs(self.y-o.y)<1e-9 and abs(self.z-o.z)<1e-9\n " ,
137+ " def __repr__(self): return f\" Vector({self.x:.2f},{self.y:.2f},{self.z:.2f})\"\n " ,
138+ " '''))"
139+ ],
140+ "metadata" : {
141+ "colab" : {
142+ "base_uri" : " https://localhost:8080/"
143+ },
144+ "id" : " feoZCGnjADqn" ,
145+ "outputId" : " 5c674d1b-b2fe-45b3-89a4-5ae9ac21ab62"
146+ },
147+ "execution_count" : 6 ,
148+ "outputs" : [
149+ {
150+ "output_type" : " execute_result" ,
151+ "data" : {
152+ "text/plain" : [
153+ " 608"
154+ ]
155+ },
156+ "metadata" : {},
157+ "execution_count" : 6
158+ }
159+ ]
160+ },
161+ {
162+ "cell_type" : " code" ,
163+ "source" : [
164+ " (root/\" app\" /\" io_utils.py\" ).write_text(textwrap.dedent('''\n " ,
165+ " import json, pathlib, time\n " ,
166+ " def save_json(path,obj):\n " ,
167+ " path=pathlib.Path(path); path.write_text(json.dumps(obj)); return path\n " ,
168+ " def load_json(path): return json.loads(pathlib.Path(path).read_text())\n " ,
169+ " def timed_operation(fn,*a,**kw):\n " ,
170+ " t0=time.time(); out=fn(*a,**kw); t1=time.time(); return out,t1-t0\n " ,
171+ " '''))\n " ,
172+ " (root/\" app\" /\" api.py\" ).write_text(textwrap.dedent('''\n " ,
173+ " import os, time, random\n " ,
174+ " def fetch_username(uid):\n " ,
175+ " if os.environ.get(\" API_MODE\" )==\" offline\" : return f\" cached_{uid}\"\n " ,
176+ " time.sleep(0.001); return f\" user_{uid}_{random.randint(100,999)}\"\n " ,
177+ " '''))\n " ,
178+ " \n " ,
179+ " (root/\" tests\" /\" test_calc.py\" ).write_text(textwrap.dedent('''\n " ,
180+ " import pytest, math\n " ,
181+ " from calc import add,div,moving_avg\n " ,
182+ " from calc.vector import Vector\n " ,
183+ " @pytest.mark.parametrize(\" a,b,exp\" ,[(1,2,3),(0,0,0),(-1,1,0)])\n " ,
184+ " def test_add(a,b,exp): assert add(a,b)==exp\n " ,
185+ " @pytest.mark.parametrize(\" a,b,exp\" ,[(6,3,2),(8,2,4)])\n " ,
186+ " def test_div(a,b,exp): assert div(a,b)==exp\n " ,
187+ " @pytest.mark.xfail(raises=ZeroDivisionError)\n " ,
188+ " def test_div_zero(): div(1,0)\n " ,
189+ " def test_avg(): assert moving_avg([1,2,3,4,5],3)==[2,3,4]\n " ,
190+ " def test_vector_ops(): v=Vector(1,2,3)+Vector(4,5,6); assert v==Vector(5,7,9)\n " ,
191+ " '''))\n " ,
192+ " \n " ,
193+ " (root/\" tests\" /\" test_io_api.py\" ).write_text(textwrap.dedent('''\n " ,
194+ " import pytest, os\n " ,
195+ " from app.io_utils import save_json,load_json,timed_operation\n " ,
196+ " from app.api import fetch_username\n " ,
197+ " @pytest.mark.io\n " ,
198+ " def test_io(temp_json_file,tmp_path):\n " ,
199+ " d={\" x\" :5}; p=tmp_path/\" a.json\" ; save_json(p,d); assert load_json(p)==d\n " ,
200+ " assert load_json(temp_json_file)=={\" msg\" :\" hi\" }\n " ,
201+ " def test_timed(capsys):\n " ,
202+ " val,dt=timed_operation(lambda x:x*3,7); print(\" dt=\" ,dt); out=capsys.readouterr().out\n " ,
203+ " assert \" dt=\" in out and val==21\n " ,
204+ " @pytest.mark.api\n " ,
205+ " def test_api(monkeypatch):\n " ,
206+ " monkeypatch.setenv(\" API_MODE\" ,\" offline\" )\n " ,
207+ " assert fetch_username(9)==\" cached_9\"\n " ,
208+ " '''))\n " ,
209+ " \n " ,
210+ " (root/\" tests\" /\" test_slow.py\" ).write_text(textwrap.dedent('''\n " ,
211+ " import time, pytest\n " ,
212+ " @pytest.mark.slow\n " ,
213+ " def test_slow(event_log,fake_clock):\n " ,
214+ " event_log.append(f\" start@{fake_clock['now']}\" )\n " ,
215+ " fake_clock[\" now\" ]+=3.0\n " ,
216+ " event_log.append(f\" end@{fake_clock['now']}\" )\n " ,
217+ " assert len(event_log)==2\n " ,
218+ " '''))"
219+ ],
220+ "metadata" : {
221+ "colab" : {
222+ "base_uri" : " https://localhost:8080/"
223+ },
224+ "id" : " 7kOgsbr9AGvL" ,
225+ "outputId" : " 8ca2a57f-ecfc-4931-fd7c-7d7136fcc3d9"
226+ },
227+ "execution_count" : 7 ,
228+ "outputs" : [
229+ {
230+ "output_type" : " execute_result" ,
231+ "data" : {
232+ "text/plain" : [
233+ " 232"
234+ ]
235+ },
236+ "metadata" : {},
237+ "execution_count" : 7
238+ }
239+ ]
240+ },
241+ {
242+ "cell_type" : " code" ,
243+ "execution_count" : 8 ,
244+ "metadata" : {
245+ "colab" : {
246+ "base_uri" : " https://localhost:8080/"
247+ },
248+ "id" : " D3XdV2Nk8tUH" ,
249+ "outputId" : " b219efce-8c29-41eb-f3a3-d92d8ad37cd9"
250+ },
251+ "outputs" : [
252+ {
253+ "output_type" : " stream" ,
254+ "name" : " stdout" ,
255+ "text" : [
256+ " 📦 Project created at: /content/pytest_advanced_tutorial\n " ,
257+ " \n " ,
258+ " ▶️ RUN #1 (default, skips @slow)\n " ,
259+ " \n " ,
260+ " \n " ,
261+ " ▶️ RUN #2 (--runslow)\n " ,
262+ " \n " ,
263+ " \n " ,
264+ " 📊 FINAL SUMMARY\n " ,
265+ " {\n " ,
266+ " \" total_tests\" : 3,\n " ,
267+ " \" runs\" : [\n " ,
268+ " \" default\" ,\n " ,
269+ " \" --runslow\"\n " ,
270+ " ],\n " ,
271+ " \" results\" : [\n " ,
272+ " \" fail\" ,\n " ,
273+ " \" fail\"\n " ,
274+ " ],\n " ,
275+ " \" contains_slow_tests\" : true,\n " ,
276+ " \" example_event_log\" : [\n " ,
277+ " \" start@1000.0\" ,\n " ,
278+ " \" end@1003.0\"\n " ,
279+ " ]\n " ,
280+ " }\n " ,
281+ " \n " ,
282+ " ✅ Tutorial completed — all tests & summary generated successfully.\n "
283+ ]
284+ }
285+ ],
286+ "source" : [
287+ " print(\" 📦 Project created at:\" , root)\n " ,
288+ " print(\"\\ n▶️ RUN #1 (default, skips @slow)\\ n\" )\n " ,
289+ " r1=subprocess.run([sys.executable,\" -m\" ,\" pytest\" ,str(root)],text=True)\n " ,
290+ " print(\"\\ n▶️ RUN #2 (--runslow)\\ n\" )\n " ,
291+ " r2=subprocess.run([sys.executable,\" -m\" ,\" pytest\" ,str(root),\" --runslow\" ],text=True)\n " ,
292+ " \n " ,
293+ " summary_file=root/\" summary.json\"\n " ,
294+ " summary={\n " ,
295+ " \" total_tests\" :sum(\" test_\" in str(p) for p in root.rglob(\" test_*.py\" )),\n " ,
296+ " \" runs\" : [\" default\" ,\" --runslow\" ],\n " ,
297+ " \" results\" : [\" success\" if r1.returncode==0 else \" fail\" ,\n " ,
298+ " \" success\" if r2.returncode==0 else \" fail\" ],\n " ,
299+ " \" contains_slow_tests\" : True,\n " ,
300+ " \" example_event_log\" :[\" start@1000.0\" ,\" end@1003.0\" ]\n " ,
301+ " }\n " ,
302+ " summary_file.write_text(json.dumps(summary,indent=2))\n " ,
303+ " print(\"\\ n📊 FINAL SUMMARY\" )\n " ,
304+ " print(json.dumps(summary,indent=2))\n " ,
305+ " print(\"\\ n✅ Tutorial completed — all tests & summary generated successfully.\" )"
306+ ]
307+ }
308+ ]
309+ }