diff --git a/Python.gitignore b/Python.gitignore
new file mode 100644
index 00000000..e1af3f80
--- /dev/null
+++ b/Python.gitignore
@@ -0,0 +1,165 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# Kaggle API token
+.kaggle/kaggle.json
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
+.pdm.toml
+.pdm-python
+.pdm-build/
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
diff --git a/TP1/1 - Practical number 1.ipynb b/TP1/1 - Practical number 1.ipynb
index bc5d11b8..31e61b42 100644
--- a/TP1/1 - Practical number 1.ipynb
+++ b/TP1/1 - Practical number 1.ipynb
@@ -17,64 +17,64 @@
"source": [
"- **Question 1:** *Name three types of API protocols. Briefly explain the primary use of each.*\n",
"\n",
- " - \n",
+ "  - REST: Resource-oriented, based on CRUD actions (Create, Read, Update, Delete). It relies on stateless client-server communication over HTTP. It is known for its simplicity and scalability. \n",
"\n",
- " - \n",
+ "  - SOAP: used for exchanging structured information in web services. It uses XML as its message format and relies on application-layer protocols such as HTTP and SMTP. It is known for its robustness and security features. \n",
"\n",
- " - \n",
+ "  - gRPC: a high-performance open-source framework developed by Google. It uses HTTP/2. It provides features such as authentication and load balancing, and is popular in microservices architectures due to its support for multiple programming languages. However, its adoption is limited by the number of existing applications that support HTTP/2.\n",
"\n",
" \n",
"\n",
"\n",
"- **Question 2:** *What are the HTTP response code families? And what do they mean?*\n",
"\n",
- " - \n",
- " - \n",
- " - \n",
- " - \n",
- " - \n",
+ " - 1XX (Informational) : The request was received and the process continuing. This code family is only for informational purposes. \n",
+ " - 2XX (Success) : It indicates the success of a request. The action was successfully received, understood and accepted.\n",
+ " - 3XX (Redirection) : Informs the client that it must take additional action to complete the request. \n",
+ " - 4XX (Client Error) : The client has made an error. The request contains bad syntax or cannot be fulfilled. \n",
+ " - 5XX (Server Error) : An error occurred on the server side, or the server is incapable of performing the request. \n",
"\n",
" Understanding these families helps developers diagnose and troubleshoot issues during API interactions.\n",
"\n",
"- **Question 3:** *What do the HTTP response codes 201, 401, and 404 mean?*\n",
"\n",
- " - **201:** \n",
- " - **401:** \n",
- " - **404:** \n",
+ "  - **201:** Created : The request successfully created a new resource. Typically used in response to a POST request. \n",
+ " - **401:** Unauthorized : The client must authenticate itself to get the requested response. \n",
+ "  - **404:** Not Found : The server cannot find the requested resource. \n",
"\n",
"- **Question 4:** *Name the 4 basic HTTP verbs.*\n",
"\n",
- " - \n",
- " - \n",
- " - \n",
- " - \n",
+ "  - POST : Create a resource. \n",
+ "  - GET : Obtain a resource.\n",
+ "  - PUT : Modify a resource entirely. \n",
+ "  - DELETE : Remove a resource.\n",
"\n",
"- **Question 5:** *Explain the difference between PUT and PATCH?*\n",
"\n",
- " - **PUT:** :\n",
+ "  - **PUT:** : Modify a resource entirely. It requires the full representation of the resource in order to replace it. \n",
"\n",
- " - **PATCH:** :\n",
+ "  - **PATCH:** : Modify a resource partially. It changes only the indicated fields of the resource without changing the others. \n",
"\n",
"- **Question 6:** *Name at least two data formats commonly used in API exchanges.*\n",
"\n",
- " - \n",
+ "  - JSON : used for data interchange between clients and servers. It can be easily parsed and generated by many programming languages. \n",
"\n",
- " - \n",
+ " - XML : provides a flexible way to create information formats and electronically share structured data via the internet. (Often used with SOAP)\n",
"\n",
"- **Question 7:** *How can you verify the validity of a resource without getting the entire response?*\n",
"\n",
- " - \n",
+ "  - The HTTP HEAD method is similar to GET but does not return the body of the response — only the headers, which contain the status, content type, etc.\n",
"\n",
"- **Question 8:** *What are the main concepts of REST? (name them)*\n",
"\n",
- " - \n",
- " - \n",
- " - \n",
- " - \n",
+ "  - Identifying resources\n",
+ "  - Resource handling\n",
+ " - Self-describing messages\n",
+ " - Hypermedia\n",
"\n",
"- **Question 9:** *Can you explain one of the main concepts of your choice from among those you mention? (Give an example if possible)*\n",
"\n",
- " - \n",
+ "  - Self-describing messages: in REST, each request or response contains metadata that provides context about the message content and how to handle it — for example, HTTP headers such as Content-Type. \n",
"\n",
"In the subsequent sections, we will delve into practical exercises to apply and deepen our understanding of these concepts using SOAP, REST, and GraphQL APIs.\n"
]
@@ -130,9 +130,24 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 3,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n",
+ "\n",
+ " \n",
+ " \n",
+ " Ottawa\n",
+ " \n",
+ " \n",
+ "\n"
+ ]
+ }
+ ],
"source": [
"import requests\n",
"# SOAP request URL\n",
@@ -142,7 +157,7 @@
"payload = \"\"\"\n",
" \n",
" \n",
- " US\n",
+ " CA\n",
" \n",
" \n",
" \"\"\"\n",
@@ -225,19 +240,1244 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 20,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "{'count': 82,\n",
+ " 'next': 'https://swapi.dev/api/people/?page=2',\n",
+ " 'previous': None,\n",
+ " 'results': [{'name': 'Luke Skywalker',\n",
+ " 'height': '172',\n",
+ " 'mass': '77',\n",
+ " 'hair_color': 'blond',\n",
+ " 'skin_color': 'fair',\n",
+ " 'eye_color': 'blue',\n",
+ " 'birth_year': '19BBY',\n",
+ " 'gender': 'male',\n",
+ " 'homeworld': 'https://swapi.dev/api/planets/1/',\n",
+ " 'films': ['https://swapi.dev/api/films/1/',\n",
+ " 'https://swapi.dev/api/films/2/',\n",
+ " 'https://swapi.dev/api/films/3/',\n",
+ " 'https://swapi.dev/api/films/6/'],\n",
+ " 'species': [],\n",
+ " 'vehicles': ['https://swapi.dev/api/vehicles/14/',\n",
+ " 'https://swapi.dev/api/vehicles/30/'],\n",
+ " 'starships': ['https://swapi.dev/api/starships/12/',\n",
+ " 'https://swapi.dev/api/starships/22/'],\n",
+ " 'created': '2014-12-09T13:50:51.644000Z',\n",
+ " 'edited': '2014-12-20T21:17:56.891000Z',\n",
+ " 'url': 'https://swapi.dev/api/people/1/'},\n",
+ " {'name': 'C-3PO',\n",
+ " 'height': '167',\n",
+ " 'mass': '75',\n",
+ " 'hair_color': 'n/a',\n",
+ " 'skin_color': 'gold',\n",
+ " 'eye_color': 'yellow',\n",
+ " 'birth_year': '112BBY',\n",
+ " 'gender': 'n/a',\n",
+ " 'homeworld': 'https://swapi.dev/api/planets/1/',\n",
+ " 'films': ['https://swapi.dev/api/films/1/',\n",
+ " 'https://swapi.dev/api/films/2/',\n",
+ " 'https://swapi.dev/api/films/3/',\n",
+ " 'https://swapi.dev/api/films/4/',\n",
+ " 'https://swapi.dev/api/films/5/',\n",
+ " 'https://swapi.dev/api/films/6/'],\n",
+ " 'species': ['https://swapi.dev/api/species/2/'],\n",
+ " 'vehicles': [],\n",
+ " 'starships': [],\n",
+ " 'created': '2014-12-10T15:10:51.357000Z',\n",
+ " 'edited': '2014-12-20T21:17:50.309000Z',\n",
+ " 'url': 'https://swapi.dev/api/people/2/'},\n",
+ " {'name': 'R2-D2',\n",
+ " 'height': '96',\n",
+ " 'mass': '32',\n",
+ " 'hair_color': 'n/a',\n",
+ " 'skin_color': 'white, blue',\n",
+ " 'eye_color': 'red',\n",
+ " 'birth_year': '33BBY',\n",
+ " 'gender': 'n/a',\n",
+ " 'homeworld': 'https://swapi.dev/api/planets/8/',\n",
+ " 'films': ['https://swapi.dev/api/films/1/',\n",
+ " 'https://swapi.dev/api/films/2/',\n",
+ " 'https://swapi.dev/api/films/3/',\n",
+ " 'https://swapi.dev/api/films/4/',\n",
+ " 'https://swapi.dev/api/films/5/',\n",
+ " 'https://swapi.dev/api/films/6/'],\n",
+ " 'species': ['https://swapi.dev/api/species/2/'],\n",
+ " 'vehicles': [],\n",
+ " 'starships': [],\n",
+ " 'created': '2014-12-10T15:11:50.376000Z',\n",
+ " 'edited': '2014-12-20T21:17:50.311000Z',\n",
+ " 'url': 'https://swapi.dev/api/people/3/'},\n",
+ " {'name': 'Darth Vader',\n",
+ " 'height': '202',\n",
+ " 'mass': '136',\n",
+ " 'hair_color': 'none',\n",
+ " 'skin_color': 'white',\n",
+ " 'eye_color': 'yellow',\n",
+ " 'birth_year': '41.9BBY',\n",
+ " 'gender': 'male',\n",
+ " 'homeworld': 'https://swapi.dev/api/planets/1/',\n",
+ " 'films': ['https://swapi.dev/api/films/1/',\n",
+ " 'https://swapi.dev/api/films/2/',\n",
+ " 'https://swapi.dev/api/films/3/',\n",
+ " 'https://swapi.dev/api/films/6/'],\n",
+ " 'species': [],\n",
+ " 'vehicles': [],\n",
+ " 'starships': ['https://swapi.dev/api/starships/13/'],\n",
+ " 'created': '2014-12-10T15:18:20.704000Z',\n",
+ " 'edited': '2014-12-20T21:17:50.313000Z',\n",
+ " 'url': 'https://swapi.dev/api/people/4/'},\n",
+ " {'name': 'Leia Organa',\n",
+ " 'height': '150',\n",
+ " 'mass': '49',\n",
+ " 'hair_color': 'brown',\n",
+ " 'skin_color': 'light',\n",
+ " 'eye_color': 'brown',\n",
+ " 'birth_year': '19BBY',\n",
+ " 'gender': 'female',\n",
+ " 'homeworld': 'https://swapi.dev/api/planets/2/',\n",
+ " 'films': ['https://swapi.dev/api/films/1/',\n",
+ " 'https://swapi.dev/api/films/2/',\n",
+ " 'https://swapi.dev/api/films/3/',\n",
+ " 'https://swapi.dev/api/films/6/'],\n",
+ " 'species': [],\n",
+ " 'vehicles': ['https://swapi.dev/api/vehicles/30/'],\n",
+ " 'starships': [],\n",
+ " 'created': '2014-12-10T15:20:09.791000Z',\n",
+ " 'edited': '2014-12-20T21:17:50.315000Z',\n",
+ " 'url': 'https://swapi.dev/api/people/5/'},\n",
+ " {'name': 'Owen Lars',\n",
+ " 'height': '178',\n",
+ " 'mass': '120',\n",
+ " 'hair_color': 'brown, grey',\n",
+ " 'skin_color': 'light',\n",
+ " 'eye_color': 'blue',\n",
+ " 'birth_year': '52BBY',\n",
+ " 'gender': 'male',\n",
+ " 'homeworld': 'https://swapi.dev/api/planets/1/',\n",
+ " 'films': ['https://swapi.dev/api/films/1/',\n",
+ " 'https://swapi.dev/api/films/5/',\n",
+ " 'https://swapi.dev/api/films/6/'],\n",
+ " 'species': [],\n",
+ " 'vehicles': [],\n",
+ " 'starships': [],\n",
+ " 'created': '2014-12-10T15:52:14.024000Z',\n",
+ " 'edited': '2014-12-20T21:17:50.317000Z',\n",
+ " 'url': 'https://swapi.dev/api/people/6/'},\n",
+ " {'name': 'Beru Whitesun lars',\n",
+ " 'height': '165',\n",
+ " 'mass': '75',\n",
+ " 'hair_color': 'brown',\n",
+ " 'skin_color': 'light',\n",
+ " 'eye_color': 'blue',\n",
+ " 'birth_year': '47BBY',\n",
+ " 'gender': 'female',\n",
+ " 'homeworld': 'https://swapi.dev/api/planets/1/',\n",
+ " 'films': ['https://swapi.dev/api/films/1/',\n",
+ " 'https://swapi.dev/api/films/5/',\n",
+ " 'https://swapi.dev/api/films/6/'],\n",
+ " 'species': [],\n",
+ " 'vehicles': [],\n",
+ " 'starships': [],\n",
+ " 'created': '2014-12-10T15:53:41.121000Z',\n",
+ " 'edited': '2014-12-20T21:17:50.319000Z',\n",
+ " 'url': 'https://swapi.dev/api/people/7/'},\n",
+ " {'name': 'R5-D4',\n",
+ " 'height': '97',\n",
+ " 'mass': '32',\n",
+ " 'hair_color': 'n/a',\n",
+ " 'skin_color': 'white, red',\n",
+ " 'eye_color': 'red',\n",
+ " 'birth_year': 'unknown',\n",
+ " 'gender': 'n/a',\n",
+ " 'homeworld': 'https://swapi.dev/api/planets/1/',\n",
+ " 'films': ['https://swapi.dev/api/films/1/'],\n",
+ " 'species': ['https://swapi.dev/api/species/2/'],\n",
+ " 'vehicles': [],\n",
+ " 'starships': [],\n",
+ " 'created': '2014-12-10T15:57:50.959000Z',\n",
+ " 'edited': '2014-12-20T21:17:50.321000Z',\n",
+ " 'url': 'https://swapi.dev/api/people/8/'},\n",
+ " {'name': 'Biggs Darklighter',\n",
+ " 'height': '183',\n",
+ " 'mass': '84',\n",
+ " 'hair_color': 'black',\n",
+ " 'skin_color': 'light',\n",
+ " 'eye_color': 'brown',\n",
+ " 'birth_year': '24BBY',\n",
+ " 'gender': 'male',\n",
+ " 'homeworld': 'https://swapi.dev/api/planets/1/',\n",
+ " 'films': ['https://swapi.dev/api/films/1/'],\n",
+ " 'species': [],\n",
+ " 'vehicles': [],\n",
+ " 'starships': ['https://swapi.dev/api/starships/12/'],\n",
+ " 'created': '2014-12-10T15:59:50.509000Z',\n",
+ " 'edited': '2014-12-20T21:17:50.323000Z',\n",
+ " 'url': 'https://swapi.dev/api/people/9/'},\n",
+ " {'name': 'Obi-Wan Kenobi',\n",
+ " 'height': '182',\n",
+ " 'mass': '77',\n",
+ " 'hair_color': 'auburn, white',\n",
+ " 'skin_color': 'fair',\n",
+ " 'eye_color': 'blue-gray',\n",
+ " 'birth_year': '57BBY',\n",
+ " 'gender': 'male',\n",
+ " 'homeworld': 'https://swapi.dev/api/planets/20/',\n",
+ " 'films': ['https://swapi.dev/api/films/1/',\n",
+ " 'https://swapi.dev/api/films/2/',\n",
+ " 'https://swapi.dev/api/films/3/',\n",
+ " 'https://swapi.dev/api/films/4/',\n",
+ " 'https://swapi.dev/api/films/5/',\n",
+ " 'https://swapi.dev/api/films/6/'],\n",
+ " 'species': [],\n",
+ " 'vehicles': ['https://swapi.dev/api/vehicles/38/'],\n",
+ " 'starships': ['https://swapi.dev/api/starships/48/',\n",
+ " 'https://swapi.dev/api/starships/59/',\n",
+ " 'https://swapi.dev/api/starships/64/',\n",
+ " 'https://swapi.dev/api/starships/65/',\n",
+ " 'https://swapi.dev/api/starships/74/'],\n",
+ " 'created': '2014-12-10T16:16:29.192000Z',\n",
+ " 'edited': '2014-12-20T21:17:50.325000Z',\n",
+ " 'url': 'https://swapi.dev/api/people/10/'}]}"
+ ]
+ },
+ "execution_count": 20,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
- "url = \".......\"\n",
+ "url = \"https://swapi.dev/api/\"\n",
+ "characters_url = \"https://swapi.dev/api/people/\"\n",
+ "films_url = \"https://swapi.dev/api/films/\"\n",
+ "planets_url = \"https://swapi.dev/api/planets/\"\n",
+ "\n",
"params = {\n",
"}\n",
"\n",
- "response = requests.get(url, params=params)\n",
+ "response = requests.get(characters_url, params=params)\n",
"data = response.json()\n",
"data"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "{'name': 'Luke Skywalker',\n",
+ " 'height': '172',\n",
+ " 'mass': '77',\n",
+ " 'hair_color': 'blond',\n",
+ " 'skin_color': 'fair',\n",
+ " 'eye_color': 'blue',\n",
+ " 'birth_year': '19BBY',\n",
+ " 'gender': 'male',\n",
+ " 'homeworld': 'https://swapi.dev/api/planets/1/',\n",
+ " 'films': ['https://swapi.dev/api/films/1/',\n",
+ " 'https://swapi.dev/api/films/2/',\n",
+ " 'https://swapi.dev/api/films/3/',\n",
+ " 'https://swapi.dev/api/films/6/'],\n",
+ " 'species': [],\n",
+ " 'vehicles': ['https://swapi.dev/api/vehicles/14/',\n",
+ " 'https://swapi.dev/api/vehicles/30/'],\n",
+ " 'starships': ['https://swapi.dev/api/starships/12/',\n",
+ " 'https://swapi.dev/api/starships/22/'],\n",
+ " 'created': '2014-12-09T13:50:51.644000Z',\n",
+ " 'edited': '2014-12-20T21:17:56.891000Z',\n",
+ " 'url': 'https://swapi.dev/api/people/1/'}"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "import requests\n",
+ "\n",
+ "def search_character(name):\n",
+ " search_url = \"https://swapi.dev/api/people\"\n",
+ " response = requests.get(search_url, params={\"search\": name})\n",
+ " if response.status_code == 200:\n",
+ " data = response.json()\n",
+ " if data['count'] > 0:\n",
+ " character = data['results'][0]\n",
+ "            return character\n",
+ " else:\n",
+ " return \"Character not found.\"\n",
+ " else:\n",
+ " return \"Error in request.\"\n",
+ "\n",
+ "character_info = search_character(\"Luke Skywalker\")\n",
+ "display(character_info)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " name | \n",
+ " height | \n",
+ " mass | \n",
+ " hair_color | \n",
+ " skin_color | \n",
+ " eye_color | \n",
+ " birth_year | \n",
+ " gender | \n",
+ " homeworld | \n",
+ " films | \n",
+ " species | \n",
+ " vehicles | \n",
+ " starships | \n",
+ " created | \n",
+ " edited | \n",
+ " url | \n",
+ " film_id | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " | 0 | \n",
+ " Luke Skywalker | \n",
+ " 172 | \n",
+ " 77 | \n",
+ " blond | \n",
+ " fair | \n",
+ " blue | \n",
+ " 19BBY | \n",
+ " male | \n",
+ " https://swapi.dev/api/planets/1/ | \n",
+ " [https://swapi.dev/api/films/1/, https://swapi... | \n",
+ " [Human] | \n",
+ " [https://swapi.dev/api/vehicles/14/, https://s... | \n",
+ " [https://swapi.dev/api/starships/12/, https://... | \n",
+ " 2014-12-09T13:50:51.644000Z | \n",
+ " 2014-12-20T21:17:56.891000Z | \n",
+ " https://swapi.dev/api/people/1/ | \n",
+ " 2 | \n",
+ "
\n",
+ " \n",
+ " | 1 | \n",
+ " C-3PO | \n",
+ " 167 | \n",
+ " 75 | \n",
+ " n/a | \n",
+ " gold | \n",
+ " yellow | \n",
+ " 112BBY | \n",
+ " n/a | \n",
+ " https://swapi.dev/api/planets/1/ | \n",
+ " [https://swapi.dev/api/films/1/, https://swapi... | \n",
+ " [https://swapi.dev/api/species/2/] | \n",
+ " [] | \n",
+ " [] | \n",
+ " 2014-12-10T15:10:51.357000Z | \n",
+ " 2014-12-20T21:17:50.309000Z | \n",
+ " https://swapi.dev/api/people/2/ | \n",
+ " 2 | \n",
+ "
\n",
+ " \n",
+ " | 2 | \n",
+ " R2-D2 | \n",
+ " 96 | \n",
+ " 32 | \n",
+ " n/a | \n",
+ " white, blue | \n",
+ " red | \n",
+ " 33BBY | \n",
+ " n/a | \n",
+ " https://swapi.dev/api/planets/8/ | \n",
+ " [https://swapi.dev/api/films/1/, https://swapi... | \n",
+ " [https://swapi.dev/api/species/2/] | \n",
+ " [] | \n",
+ " [] | \n",
+ " 2014-12-10T15:11:50.376000Z | \n",
+ " 2014-12-20T21:17:50.311000Z | \n",
+ " https://swapi.dev/api/people/3/ | \n",
+ " 2 | \n",
+ "
\n",
+ " \n",
+ " | 3 | \n",
+ " Darth Vader | \n",
+ " 202 | \n",
+ " 136 | \n",
+ " none | \n",
+ " white | \n",
+ " yellow | \n",
+ " 41.9BBY | \n",
+ " male | \n",
+ " https://swapi.dev/api/planets/1/ | \n",
+ " [https://swapi.dev/api/films/1/, https://swapi... | \n",
+ " [Human] | \n",
+ " [] | \n",
+ " [https://swapi.dev/api/starships/13/] | \n",
+ " 2014-12-10T15:18:20.704000Z | \n",
+ " 2014-12-20T21:17:50.313000Z | \n",
+ " https://swapi.dev/api/people/4/ | \n",
+ " 2 | \n",
+ "
\n",
+ " \n",
+ " | 4 | \n",
+ " Leia Organa | \n",
+ " 150 | \n",
+ " 49 | \n",
+ " brown | \n",
+ " light | \n",
+ " brown | \n",
+ " 19BBY | \n",
+ " female | \n",
+ " https://swapi.dev/api/planets/2/ | \n",
+ " [https://swapi.dev/api/films/1/, https://swapi... | \n",
+ " [Human] | \n",
+ " [https://swapi.dev/api/vehicles/30/] | \n",
+ " [] | \n",
+ " 2014-12-10T15:20:09.791000Z | \n",
+ " 2014-12-20T21:17:50.315000Z | \n",
+ " https://swapi.dev/api/people/5/ | \n",
+ " 2 | \n",
+ "
\n",
+ " \n",
+ " | 5 | \n",
+ " Obi-Wan Kenobi | \n",
+ " 182 | \n",
+ " 77 | \n",
+ " auburn, white | \n",
+ " fair | \n",
+ " blue-gray | \n",
+ " 57BBY | \n",
+ " male | \n",
+ " https://swapi.dev/api/planets/20/ | \n",
+ " [https://swapi.dev/api/films/1/, https://swapi... | \n",
+ " [Human] | \n",
+ " [https://swapi.dev/api/vehicles/38/] | \n",
+ " [https://swapi.dev/api/starships/48/, https://... | \n",
+ " 2014-12-10T16:16:29.192000Z | \n",
+ " 2014-12-20T21:17:50.325000Z | \n",
+ " https://swapi.dev/api/people/10/ | \n",
+ " 2 | \n",
+ "
\n",
+ " \n",
+ " | 6 | \n",
+ " Chewbacca | \n",
+ " 228 | \n",
+ " 112 | \n",
+ " brown | \n",
+ " unknown | \n",
+ " blue | \n",
+ " 200BBY | \n",
+ " male | \n",
+ " https://swapi.dev/api/planets/14/ | \n",
+ " [https://swapi.dev/api/films/1/, https://swapi... | \n",
+ " [https://swapi.dev/api/species/3/] | \n",
+ " [https://swapi.dev/api/vehicles/19/] | \n",
+ " [https://swapi.dev/api/starships/10/, https://... | \n",
+ " 2014-12-10T16:42:45.066000Z | \n",
+ " 2014-12-20T21:17:50.332000Z | \n",
+ " https://swapi.dev/api/people/13/ | \n",
+ " 2 | \n",
+ "
\n",
+ " \n",
+ " | 7 | \n",
+ " Han Solo | \n",
+ " 180 | \n",
+ " 80 | \n",
+ " brown | \n",
+ " fair | \n",
+ " brown | \n",
+ " 29BBY | \n",
+ " male | \n",
+ " https://swapi.dev/api/planets/22/ | \n",
+ " [https://swapi.dev/api/films/1/, https://swapi... | \n",
+ " [Human] | \n",
+ " [] | \n",
+ " [https://swapi.dev/api/starships/10/, https://... | \n",
+ " 2014-12-10T16:49:14.582000Z | \n",
+ " 2014-12-20T21:17:50.334000Z | \n",
+ " https://swapi.dev/api/people/14/ | \n",
+ " 2 | \n",
+ "
\n",
+ " \n",
+ " | 8 | \n",
+ " Wedge Antilles | \n",
+ " 170 | \n",
+ " 77 | \n",
+ " brown | \n",
+ " fair | \n",
+ " hazel | \n",
+ " 21BBY | \n",
+ " male | \n",
+ " https://swapi.dev/api/planets/22/ | \n",
+ " [https://swapi.dev/api/films/1/, https://swapi... | \n",
+ " [Human] | \n",
+ " [https://swapi.dev/api/vehicles/14/] | \n",
+ " [https://swapi.dev/api/starships/12/] | \n",
+ " 2014-12-12T11:08:06.469000Z | \n",
+ " 2014-12-20T21:17:50.341000Z | \n",
+ " https://swapi.dev/api/people/18/ | \n",
+ " 2 | \n",
+ "
\n",
+ " \n",
+ " | 9 | \n",
+ " Yoda | \n",
+ " 66 | \n",
+ " 17 | \n",
+ " white | \n",
+ " green | \n",
+ " brown | \n",
+ " 896BBY | \n",
+ " male | \n",
+ " https://swapi.dev/api/planets/28/ | \n",
+ " [https://swapi.dev/api/films/2/, https://swapi... | \n",
+ " [https://swapi.dev/api/species/6/] | \n",
+ " [] | \n",
+ " [] | \n",
+ " 2014-12-15T12:26:01.042000Z | \n",
+ " 2014-12-20T21:17:50.345000Z | \n",
+ " https://swapi.dev/api/people/20/ | \n",
+ " 2 | \n",
+ "
\n",
+ " \n",
+ " | 10 | \n",
+ " Palpatine | \n",
+ " 170 | \n",
+ " 75 | \n",
+ " grey | \n",
+ " pale | \n",
+ " yellow | \n",
+ " 82BBY | \n",
+ " male | \n",
+ " https://swapi.dev/api/planets/8/ | \n",
+ " [https://swapi.dev/api/films/2/, https://swapi... | \n",
+ " [Human] | \n",
+ " [] | \n",
+ " [] | \n",
+ " 2014-12-15T12:48:05.971000Z | \n",
+ " 2014-12-20T21:17:50.347000Z | \n",
+ " https://swapi.dev/api/people/21/ | \n",
+ " 2 | \n",
+ "
\n",
+ " \n",
+ " | 11 | \n",
+ " Boba Fett | \n",
+ " 183 | \n",
+ " 78.2 | \n",
+ " black | \n",
+ " fair | \n",
+ " brown | \n",
+ " 31.5BBY | \n",
+ " male | \n",
+ " https://swapi.dev/api/planets/10/ | \n",
+ " [https://swapi.dev/api/films/2/, https://swapi... | \n",
+ " [Human] | \n",
+ " [] | \n",
+ " [https://swapi.dev/api/starships/21/] | \n",
+ " 2014-12-15T12:49:32.457000Z | \n",
+ " 2014-12-20T21:17:50.349000Z | \n",
+ " https://swapi.dev/api/people/22/ | \n",
+ " 2 | \n",
+ "
\n",
+ " \n",
+ " | 12 | \n",
+ " IG-88 | \n",
+ " 200 | \n",
+ " 140 | \n",
+ " none | \n",
+ " metal | \n",
+ " red | \n",
+ " 15BBY | \n",
+ " none | \n",
+ " https://swapi.dev/api/planets/28/ | \n",
+ " [https://swapi.dev/api/films/2/] | \n",
+ " [https://swapi.dev/api/species/2/] | \n",
+ " [] | \n",
+ " [] | \n",
+ " 2014-12-15T12:51:10.076000Z | \n",
+ " 2014-12-20T21:17:50.351000Z | \n",
+ " https://swapi.dev/api/people/23/ | \n",
+ " 2 | \n",
+ "
\n",
+ " \n",
+ " | 13 | \n",
+ " Bossk | \n",
+ " 190 | \n",
+ " 113 | \n",
+ " none | \n",
+ " green | \n",
+ " red | \n",
+ " 53BBY | \n",
+ " male | \n",
+ " https://swapi.dev/api/planets/29/ | \n",
+ " [https://swapi.dev/api/films/2/] | \n",
+ " [https://swapi.dev/api/species/7/] | \n",
+ " [] | \n",
+ " [] | \n",
+ " 2014-12-15T12:53:49.297000Z | \n",
+ " 2014-12-20T21:17:50.355000Z | \n",
+ " https://swapi.dev/api/people/24/ | \n",
+ " 2 | \n",
+ "
\n",
+ " \n",
+ " | 14 | \n",
+ " Lando Calrissian | \n",
+ " 177 | \n",
+ " 79 | \n",
+ " black | \n",
+ " dark | \n",
+ " brown | \n",
+ " 31BBY | \n",
+ " male | \n",
+ " https://swapi.dev/api/planets/30/ | \n",
+ " [https://swapi.dev/api/films/2/, https://swapi... | \n",
+ " [Human] | \n",
+ " [] | \n",
+ " [https://swapi.dev/api/starships/10/] | \n",
+ " 2014-12-15T12:56:32.683000Z | \n",
+ " 2014-12-20T21:17:50.357000Z | \n",
+ " https://swapi.dev/api/people/25/ | \n",
+ " 2 | \n",
+ "
\n",
+ " \n",
+ " | 15 | \n",
+ " Lobot | \n",
+ " 175 | \n",
+ " 79 | \n",
+ " none | \n",
+ " light | \n",
+ " blue | \n",
+ " 37BBY | \n",
+ " male | \n",
+ " https://swapi.dev/api/planets/6/ | \n",
+ " [https://swapi.dev/api/films/2/] | \n",
+ " [Human] | \n",
+ " [] | \n",
+ " [] | \n",
+ " 2014-12-15T13:01:57.178000Z | \n",
+ " 2014-12-20T21:17:50.359000Z | \n",
+ " https://swapi.dev/api/people/26/ | \n",
+ " 2 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " name height mass hair_color skin_color eye_color \\\n",
+ "0 Luke Skywalker 172 77 blond fair blue \n",
+ "1 C-3PO 167 75 n/a gold yellow \n",
+ "2 R2-D2 96 32 n/a white, blue red \n",
+ "3 Darth Vader 202 136 none white yellow \n",
+ "4 Leia Organa 150 49 brown light brown \n",
+ "5 Obi-Wan Kenobi 182 77 auburn, white fair blue-gray \n",
+ "6 Chewbacca 228 112 brown unknown blue \n",
+ "7 Han Solo 180 80 brown fair brown \n",
+ "8 Wedge Antilles 170 77 brown fair hazel \n",
+ "9 Yoda 66 17 white green brown \n",
+ "10 Palpatine 170 75 grey pale yellow \n",
+ "11 Boba Fett 183 78.2 black fair brown \n",
+ "12 IG-88 200 140 none metal red \n",
+ "13 Bossk 190 113 none green red \n",
+ "14 Lando Calrissian 177 79 black dark brown \n",
+ "15 Lobot 175 79 none light blue \n",
+ "\n",
+ " birth_year gender homeworld \\\n",
+ "0 19BBY male https://swapi.dev/api/planets/1/ \n",
+ "1 112BBY n/a https://swapi.dev/api/planets/1/ \n",
+ "2 33BBY n/a https://swapi.dev/api/planets/8/ \n",
+ "3 41.9BBY male https://swapi.dev/api/planets/1/ \n",
+ "4 19BBY female https://swapi.dev/api/planets/2/ \n",
+ "5 57BBY male https://swapi.dev/api/planets/20/ \n",
+ "6 200BBY male https://swapi.dev/api/planets/14/ \n",
+ "7 29BBY male https://swapi.dev/api/planets/22/ \n",
+ "8 21BBY male https://swapi.dev/api/planets/22/ \n",
+ "9 896BBY male https://swapi.dev/api/planets/28/ \n",
+ "10 82BBY male https://swapi.dev/api/planets/8/ \n",
+ "11 31.5BBY male https://swapi.dev/api/planets/10/ \n",
+ "12 15BBY none https://swapi.dev/api/planets/28/ \n",
+ "13 53BBY male https://swapi.dev/api/planets/29/ \n",
+ "14 31BBY male https://swapi.dev/api/planets/30/ \n",
+ "15 37BBY male https://swapi.dev/api/planets/6/ \n",
+ "\n",
+ " films \\\n",
+ "0 [https://swapi.dev/api/films/1/, https://swapi... \n",
+ "1 [https://swapi.dev/api/films/1/, https://swapi... \n",
+ "2 [https://swapi.dev/api/films/1/, https://swapi... \n",
+ "3 [https://swapi.dev/api/films/1/, https://swapi... \n",
+ "4 [https://swapi.dev/api/films/1/, https://swapi... \n",
+ "5 [https://swapi.dev/api/films/1/, https://swapi... \n",
+ "6 [https://swapi.dev/api/films/1/, https://swapi... \n",
+ "7 [https://swapi.dev/api/films/1/, https://swapi... \n",
+ "8 [https://swapi.dev/api/films/1/, https://swapi... \n",
+ "9 [https://swapi.dev/api/films/2/, https://swapi... \n",
+ "10 [https://swapi.dev/api/films/2/, https://swapi... \n",
+ "11 [https://swapi.dev/api/films/2/, https://swapi... \n",
+ "12 [https://swapi.dev/api/films/2/] \n",
+ "13 [https://swapi.dev/api/films/2/] \n",
+ "14 [https://swapi.dev/api/films/2/, https://swapi... \n",
+ "15 [https://swapi.dev/api/films/2/] \n",
+ "\n",
+ " species \\\n",
+ "0 [Human] \n",
+ "1 [https://swapi.dev/api/species/2/] \n",
+ "2 [https://swapi.dev/api/species/2/] \n",
+ "3 [Human] \n",
+ "4 [Human] \n",
+ "5 [Human] \n",
+ "6 [https://swapi.dev/api/species/3/] \n",
+ "7 [Human] \n",
+ "8 [Human] \n",
+ "9 [https://swapi.dev/api/species/6/] \n",
+ "10 [Human] \n",
+ "11 [Human] \n",
+ "12 [https://swapi.dev/api/species/2/] \n",
+ "13 [https://swapi.dev/api/species/7/] \n",
+ "14 [Human] \n",
+ "15 [Human] \n",
+ "\n",
+ " vehicles \\\n",
+ "0 [https://swapi.dev/api/vehicles/14/, https://s... \n",
+ "1 [] \n",
+ "2 [] \n",
+ "3 [] \n",
+ "4 [https://swapi.dev/api/vehicles/30/] \n",
+ "5 [https://swapi.dev/api/vehicles/38/] \n",
+ "6 [https://swapi.dev/api/vehicles/19/] \n",
+ "7 [] \n",
+ "8 [https://swapi.dev/api/vehicles/14/] \n",
+ "9 [] \n",
+ "10 [] \n",
+ "11 [] \n",
+ "12 [] \n",
+ "13 [] \n",
+ "14 [] \n",
+ "15 [] \n",
+ "\n",
+ " starships \\\n",
+ "0 [https://swapi.dev/api/starships/12/, https://... \n",
+ "1 [] \n",
+ "2 [] \n",
+ "3 [https://swapi.dev/api/starships/13/] \n",
+ "4 [] \n",
+ "5 [https://swapi.dev/api/starships/48/, https://... \n",
+ "6 [https://swapi.dev/api/starships/10/, https://... \n",
+ "7 [https://swapi.dev/api/starships/10/, https://... \n",
+ "8 [https://swapi.dev/api/starships/12/] \n",
+ "9 [] \n",
+ "10 [] \n",
+ "11 [https://swapi.dev/api/starships/21/] \n",
+ "12 [] \n",
+ "13 [] \n",
+ "14 [https://swapi.dev/api/starships/10/] \n",
+ "15 [] \n",
+ "\n",
+ " created edited \\\n",
+ "0 2014-12-09T13:50:51.644000Z 2014-12-20T21:17:56.891000Z \n",
+ "1 2014-12-10T15:10:51.357000Z 2014-12-20T21:17:50.309000Z \n",
+ "2 2014-12-10T15:11:50.376000Z 2014-12-20T21:17:50.311000Z \n",
+ "3 2014-12-10T15:18:20.704000Z 2014-12-20T21:17:50.313000Z \n",
+ "4 2014-12-10T15:20:09.791000Z 2014-12-20T21:17:50.315000Z \n",
+ "5 2014-12-10T16:16:29.192000Z 2014-12-20T21:17:50.325000Z \n",
+ "6 2014-12-10T16:42:45.066000Z 2014-12-20T21:17:50.332000Z \n",
+ "7 2014-12-10T16:49:14.582000Z 2014-12-20T21:17:50.334000Z \n",
+ "8 2014-12-12T11:08:06.469000Z 2014-12-20T21:17:50.341000Z \n",
+ "9 2014-12-15T12:26:01.042000Z 2014-12-20T21:17:50.345000Z \n",
+ "10 2014-12-15T12:48:05.971000Z 2014-12-20T21:17:50.347000Z \n",
+ "11 2014-12-15T12:49:32.457000Z 2014-12-20T21:17:50.349000Z \n",
+ "12 2014-12-15T12:51:10.076000Z 2014-12-20T21:17:50.351000Z \n",
+ "13 2014-12-15T12:53:49.297000Z 2014-12-20T21:17:50.355000Z \n",
+ "14 2014-12-15T12:56:32.683000Z 2014-12-20T21:17:50.357000Z \n",
+ "15 2014-12-15T13:01:57.178000Z 2014-12-20T21:17:50.359000Z \n",
+ "\n",
+ " url film_id \n",
+ "0 https://swapi.dev/api/people/1/ 2 \n",
+ "1 https://swapi.dev/api/people/2/ 2 \n",
+ "2 https://swapi.dev/api/people/3/ 2 \n",
+ "3 https://swapi.dev/api/people/4/ 2 \n",
+ "4 https://swapi.dev/api/people/5/ 2 \n",
+ "5 https://swapi.dev/api/people/10/ 2 \n",
+ "6 https://swapi.dev/api/people/13/ 2 \n",
+ "7 https://swapi.dev/api/people/14/ 2 \n",
+ "8 https://swapi.dev/api/people/18/ 2 \n",
+ "9 https://swapi.dev/api/people/20/ 2 \n",
+ "10 https://swapi.dev/api/people/21/ 2 \n",
+ "11 https://swapi.dev/api/people/22/ 2 \n",
+ "12 https://swapi.dev/api/people/23/ 2 \n",
+ "13 https://swapi.dev/api/people/24/ 2 \n",
+ "14 https://swapi.dev/api/people/25/ 2 \n",
+ "15 https://swapi.dev/api/people/26/ 2 "
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ " film_id name_y count\n",
+ "0 2 Droid 3\n",
+ "1 2 Human 10\n",
+ "2 2 Trandoshan 1\n",
+ "3 2 Wookie 1\n",
+ "4 2 Yoda's species 1\n"
+ ]
+ }
+ ],
+ "source": [
+ "import requests\n",
+ "import pandas as pd\n",
+ "\n",
def get_characters_by_film(film_id):
    """Fetch every character appearing in one SWAPI film.

    Parameters
    ----------
    film_id : int
        Numeric id used in the SWAPI resource URL
        (https://swapi.dev/api/films/{film_id}/).

    Returns
    -------
    pandas.DataFrame
        One row per character with a ``film_id`` column added.  SWAPI leaves
        the ``species`` list empty for humans, so an empty list is replaced
        by the sentinel ``["Human"]`` (consumed by
        ``group_characters_by_species``).

    Raises
    ------
    requests.HTTPError
        If any SWAPI request returns a non-2xx status.
    """
    film_url = f"https://swapi.dev/api/films/{film_id}/"
    response = requests.get(film_url, timeout=30)
    # Fail fast on HTTP errors instead of crashing later on bad JSON.
    response.raise_for_status()
    film_data = response.json()

    characters = []
    for character_url in film_data['characters']:
        char_response = requests.get(character_url, timeout=30)
        char_response.raise_for_status()
        character_data = char_response.json()
        character_data['film_id'] = film_id  # tag each row with its film
        if not character_data['species']:
            character_data['species'] = ["Human"]  # sentinel for empty species
        characters.append(character_data)
    return pd.DataFrame(characters)


def group_characters_by_species(df):
    """Count characters per species for each film in *df*.

    Parameters
    ----------
    df : pandas.DataFrame
        Output of :func:`get_characters_by_film` — must contain ``name``,
        ``species`` (list of species URLs or the ``"Human"`` sentinel) and
        ``film_id`` columns.

    Returns
    -------
    pandas.DataFrame
        Columns ``film_id``, ``name_y`` (species name, suffix from the
        merge with the species table) and ``count``.
    """
    species_data = []
    for species_url in df['species'].explode().dropna().unique():
        if species_url == "Human":
            # Sentinel inserted by get_characters_by_film — not a real URL.
            species_data.append({"name": "Human", "url": "Human"})
        else:
            species_response = requests.get(species_url, timeout=30)
            species_response.raise_for_status()
            species_data.append(species_response.json())
    species_df = pd.DataFrame(species_data)
    merged_df = df.explode('species').merge(
        species_df, left_on='species', right_on='url', how='left')
    return merged_df.groupby(['film_id', 'name_y']).size().reset_index(name='count')
+ "\n",
+ "characters_df = get_characters_by_film(2)\n",
+ "display(characters_df)\n",
+ "grouped_characters_df = group_characters_by_species(characters_df)\n",
+ "print(grouped_characters_df)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 36,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "[('The Empire Strikes Back', 40),\n",
+ " ('A New Hope', 34),\n",
+ " ('Return of the Jedi', 34),\n",
+ " ('Revenge of the Sith', 20),\n",
+ " ('The Phantom Menace', 18),\n",
+ " ('Attack of the Clones', 16)]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "import requests\n",
+ "import pandas as pd\n",
+ "\n",
def get_all_films():
    """Return the raw film records from SWAPI.

    Single page only — SWAPI currently exposes all films on one page.

    Raises
    ------
    requests.HTTPError
        If the request returns a non-2xx status.
    """
    response = requests.get("https://swapi.dev/api/films/", timeout=30)
    response.raise_for_status()
    return response.json()['results']


def rank_films_by_characters():
    """Rank films by cast size, largest first.

    Returns
    -------
    list[tuple[str, int]]
        ``(title, number_of_characters)`` pairs, sorted descending.

    Notes
    -----
    The numeric id embedded in the resource ``url`` is what
    ``/api/films/{id}/`` expects.  ``episode_id`` is the in-universe episode
    number (e.g. "A New Hope" is films/1 but episode 4), so using it would
    pair each title with a *different* film's cast.
    """
    film_ranking = []
    for film in get_all_films():
        # Resource id comes from the URL, NOT from episode_id.
        film_id = int(film['url'].rstrip('/').split('/')[-1])
        characters_df = get_characters_by_film(film_id)
        film_ranking.append((film['title'], len(characters_df)))
    film_ranking.sort(key=lambda pair: pair[1], reverse=True)
    return film_ranking


film_ranking = rank_films_by_characters()
display(film_ranking)
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 38,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " name | \n",
+ " model | \n",
+ " manufacturer | \n",
+ " cost_in_credits | \n",
+ " length | \n",
+ " max_atmosphering_speed | \n",
+ " crew | \n",
+ " passengers | \n",
+ " cargo_capacity | \n",
+ " consumables | \n",
+ " hyperdrive_rating | \n",
+ " MGLT | \n",
+ " starship_class | \n",
+ " pilots | \n",
+ " films | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " | 0 | \n",
+ " CR90 corvette | \n",
+ " CR90 corvette | \n",
+ " Corellian Engineering Corporation | \n",
+ " 3500000 | \n",
+ " 150 | \n",
+ " 950 | \n",
+ " 30-165 | \n",
+ " 600 | \n",
+ " 3000000 | \n",
+ " 1 year | \n",
+ " 2.0 | \n",
+ " 60 | \n",
+ " corvette | \n",
+ " [] | \n",
+ " [https://swapi.dev/api/films/1/, https://swapi... | \n",
+ "
\n",
+ " \n",
+ " | 1 | \n",
+ " Star Destroyer | \n",
+ " Imperial I-class Star Destroyer | \n",
+ " Kuat Drive Yards | \n",
+ " 150000000 | \n",
+ " 1,600 | \n",
+ " 975 | \n",
+ " 47,060 | \n",
+ " n/a | \n",
+ " 36000000 | \n",
+ " 2 years | \n",
+ " 2.0 | \n",
+ " 60 | \n",
+ " Star Destroyer | \n",
+ " [] | \n",
+ " [https://swapi.dev/api/films/1/, https://swapi... | \n",
+ "
\n",
+ " \n",
+ " | 2 | \n",
+ " Sentinel-class landing craft | \n",
+ " Sentinel-class landing craft | \n",
+ " Sienar Fleet Systems, Cyngus Spaceworks | \n",
+ " 240000 | \n",
+ " 38 | \n",
+ " 1000 | \n",
+ " 5 | \n",
+ " 75 | \n",
+ " 180000 | \n",
+ " 1 month | \n",
+ " 1.0 | \n",
+ " 70 | \n",
+ " landing craft | \n",
+ " [] | \n",
+ " [https://swapi.dev/api/films/1/] | \n",
+ "
\n",
+ " \n",
+ " | 3 | \n",
+ " Death Star | \n",
+ " DS-1 Orbital Battle Station | \n",
+ " Imperial Department of Military Research, Sien... | \n",
+ " 1000000000000 | \n",
+ " 120000 | \n",
+ " n/a | \n",
+ " 342,953 | \n",
+ " 843,342 | \n",
+ " 1000000000000 | \n",
+ " 3 years | \n",
+ " 4.0 | \n",
+ " 10 | \n",
+ " Deep Space Mobile Battlestation | \n",
+ " [] | \n",
+ " [https://swapi.dev/api/films/1/] | \n",
+ "
\n",
+ " \n",
+ " | 4 | \n",
+ " Millennium Falcon | \n",
+ " YT-1300 light freighter | \n",
+ " Corellian Engineering Corporation | \n",
+ " 100000 | \n",
+ " 34.37 | \n",
+ " 1050 | \n",
+ " 4 | \n",
+ " 6 | \n",
+ " 100000 | \n",
+ " 2 months | \n",
+ " 0.5 | \n",
+ " 75 | \n",
+ " Light freighter | \n",
+ " [https://swapi.dev/api/people/13/, https://swa... | \n",
+ " [https://swapi.dev/api/films/1/, https://swapi... | \n",
+ "
\n",
+ " \n",
+ " | 5 | \n",
+ " Y-wing | \n",
+ " BTL Y-wing | \n",
+ " Koensayr Manufacturing | \n",
+ " 134999 | \n",
+ " 14 | \n",
+ " 1000km | \n",
+ " 2 | \n",
+ " 0 | \n",
+ " 110 | \n",
+ " 1 week | \n",
+ " 1.0 | \n",
+ " 80 | \n",
+ " assault starfighter | \n",
+ " [] | \n",
+ " [https://swapi.dev/api/films/1/, https://swapi... | \n",
+ "
\n",
+ " \n",
+ " | 6 | \n",
+ " X-wing | \n",
+ " T-65 X-wing | \n",
+ " Incom Corporation | \n",
+ " 149999 | \n",
+ " 12.5 | \n",
+ " 1050 | \n",
+ " 1 | \n",
+ " 0 | \n",
+ " 110 | \n",
+ " 1 week | \n",
+ " 1.0 | \n",
+ " 100 | \n",
+ " Starfighter | \n",
+ " [https://swapi.dev/api/people/1/, https://swap... | \n",
+ " [https://swapi.dev/api/films/1/, https://swapi... | \n",
+ "
\n",
+ " \n",
+ " | 7 | \n",
+ " TIE Advanced x1 | \n",
+ " Twin Ion Engine Advanced x1 | \n",
+ " Sienar Fleet Systems | \n",
+ " unknown | \n",
+ " 9.2 | \n",
+ " 1200 | \n",
+ " 1 | \n",
+ " 0 | \n",
+ " 150 | \n",
+ " 5 days | \n",
+ " 1.0 | \n",
+ " 105 | \n",
+ " Starfighter | \n",
+ " [https://swapi.dev/api/people/4/] | \n",
+ " [https://swapi.dev/api/films/1/] | \n",
+ "
\n",
+ " \n",
+ " | 8 | \n",
+ " Executor | \n",
+ " Executor-class star dreadnought | \n",
+ " Kuat Drive Yards, Fondor Shipyards | \n",
+ " 1143350000 | \n",
+ " 19000 | \n",
+ " n/a | \n",
+ " 279,144 | \n",
+ " 38000 | \n",
+ " 250000000 | \n",
+ " 6 years | \n",
+ " 2.0 | \n",
+ " 40 | \n",
+ " Star dreadnought | \n",
+ " [] | \n",
+ " [https://swapi.dev/api/films/2/, https://swapi... | \n",
+ "
\n",
+ " \n",
+ " | 9 | \n",
+ " Rebel transport | \n",
+ " GR-75 medium transport | \n",
+ " Gallofree Yards, Inc. | \n",
+ " unknown | \n",
+ " 90 | \n",
+ " 650 | \n",
+ " 6 | \n",
+ " 90 | \n",
+ " 19000000 | \n",
+ " 6 months | \n",
+ " 4.0 | \n",
+ " 20 | \n",
+ " Medium transport | \n",
+ " [] | \n",
+ " [https://swapi.dev/api/films/2/, https://swapi... | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " name model \\\n",
+ "0 CR90 corvette CR90 corvette \n",
+ "1 Star Destroyer Imperial I-class Star Destroyer \n",
+ "2 Sentinel-class landing craft Sentinel-class landing craft \n",
+ "3 Death Star DS-1 Orbital Battle Station \n",
+ "4 Millennium Falcon YT-1300 light freighter \n",
+ "5 Y-wing BTL Y-wing \n",
+ "6 X-wing T-65 X-wing \n",
+ "7 TIE Advanced x1 Twin Ion Engine Advanced x1 \n",
+ "8 Executor Executor-class star dreadnought \n",
+ "9 Rebel transport GR-75 medium transport \n",
+ "\n",
+ " manufacturer cost_in_credits length \\\n",
+ "0 Corellian Engineering Corporation 3500000 150 \n",
+ "1 Kuat Drive Yards 150000000 1,600 \n",
+ "2 Sienar Fleet Systems, Cyngus Spaceworks 240000 38 \n",
+ "3 Imperial Department of Military Research, Sien... 1000000000000 120000 \n",
+ "4 Corellian Engineering Corporation 100000 34.37 \n",
+ "5 Koensayr Manufacturing 134999 14 \n",
+ "6 Incom Corporation 149999 12.5 \n",
+ "7 Sienar Fleet Systems unknown 9.2 \n",
+ "8 Kuat Drive Yards, Fondor Shipyards 1143350000 19000 \n",
+ "9 Gallofree Yards, Inc. unknown 90 \n",
+ "\n",
+ " max_atmosphering_speed crew passengers cargo_capacity consumables \\\n",
+ "0 950 30-165 600 3000000 1 year \n",
+ "1 975 47,060 n/a 36000000 2 years \n",
+ "2 1000 5 75 180000 1 month \n",
+ "3 n/a 342,953 843,342 1000000000000 3 years \n",
+ "4 1050 4 6 100000 2 months \n",
+ "5 1000km 2 0 110 1 week \n",
+ "6 1050 1 0 110 1 week \n",
+ "7 1200 1 0 150 5 days \n",
+ "8 n/a 279,144 38000 250000000 6 years \n",
+ "9 650 6 90 19000000 6 months \n",
+ "\n",
+ " hyperdrive_rating MGLT starship_class \\\n",
+ "0 2.0 60 corvette \n",
+ "1 2.0 60 Star Destroyer \n",
+ "2 1.0 70 landing craft \n",
+ "3 4.0 10 Deep Space Mobile Battlestation \n",
+ "4 0.5 75 Light freighter \n",
+ "5 1.0 80 assault starfighter \n",
+ "6 1.0 100 Starfighter \n",
+ "7 1.0 105 Starfighter \n",
+ "8 2.0 40 Star dreadnought \n",
+ "9 4.0 20 Medium transport \n",
+ "\n",
+ " pilots \\\n",
+ "0 [] \n",
+ "1 [] \n",
+ "2 [] \n",
+ "3 [] \n",
+ "4 [https://swapi.dev/api/people/13/, https://swa... \n",
+ "5 [] \n",
+ "6 [https://swapi.dev/api/people/1/, https://swap... \n",
+ "7 [https://swapi.dev/api/people/4/] \n",
+ "8 [] \n",
+ "9 [] \n",
+ "\n",
+ " films \n",
+ "0 [https://swapi.dev/api/films/1/, https://swapi... \n",
+ "1 [https://swapi.dev/api/films/1/, https://swapi... \n",
+ "2 [https://swapi.dev/api/films/1/] \n",
+ "3 [https://swapi.dev/api/films/1/] \n",
+ "4 [https://swapi.dev/api/films/1/, https://swapi... \n",
+ "5 [https://swapi.dev/api/films/1/, https://swapi... \n",
+ "6 [https://swapi.dev/api/films/1/, https://swapi... \n",
+ "7 [https://swapi.dev/api/films/1/] \n",
+ "8 [https://swapi.dev/api/films/2/, https://swapi... \n",
+ "9 [https://swapi.dev/api/films/2/, https://swapi... "
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
import requests
import pandas as pd

# Fields kept from each starship record, in display order.
STARSHIP_FIELDS = (
    "name", "model", "manufacturer", "cost_in_credits", "length",
    "max_atmosphering_speed", "crew", "passengers", "cargo_capacity",
    "consumables", "hyperdrive_rating", "MGLT", "starship_class",
    "pilots", "films",
)


def get_starships():
    """Return selected fields for the first page of SWAPI starships.

    NOTE: SWAPI paginates its list endpoints; this fetches only the first
    page (10 ships), matching the original behaviour.  Follow
    ``response.json()['next']`` to retrieve the remaining pages.

    Returns
    -------
    list[dict]
        One dict per starship, restricted to ``STARSHIP_FIELDS``.

    Raises
    ------
    requests.HTTPError
        If the request returns a non-2xx status.
    """
    response = requests.get("https://swapi.dev/api/starships/", timeout=30)
    response.raise_for_status()
    return [
        {field: starship[field] for field in STARSHIP_FIELDS}
        for starship in response.json()['results']
    ]


# Store starship information in a DataFrame and show it.
starships_info = get_starships()
starships_df = pd.DataFrame(starships_info)
display(starships_df)
+ ]
+ },
{
"cell_type": "markdown",
"metadata": {},
@@ -321,29 +1561,180 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 41,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " film_title | \n",
+ " character_name | \n",
+ " species_name | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " | 0 | \n",
+ " A New Hope | \n",
+ " Luke Skywalker | \n",
+ " Human | \n",
+ "
\n",
+ " \n",
+ " | 1 | \n",
+ " A New Hope | \n",
+ " C-3PO | \n",
+ " Droid | \n",
+ "
\n",
+ " \n",
+ " | 2 | \n",
+ " A New Hope | \n",
+ " R2-D2 | \n",
+ " Droid | \n",
+ "
\n",
+ " \n",
+ " | 3 | \n",
+ " A New Hope | \n",
+ " Darth Vader | \n",
+ " Human | \n",
+ "
\n",
+ " \n",
+ " | 4 | \n",
+ " A New Hope | \n",
+ " Leia Organa | \n",
+ " Human | \n",
+ "
\n",
+ " \n",
+ " | ... | \n",
+ " ... | \n",
+ " ... | \n",
+ " ... | \n",
+ "
\n",
+ " \n",
+ " | 157 | \n",
+ " Revenge of the Sith | \n",
+ " Grievous | \n",
+ " Kaleesh | \n",
+ "
\n",
+ " \n",
+ " | 158 | \n",
+ " Revenge of the Sith | \n",
+ " Tarfful | \n",
+ " Wookie | \n",
+ "
\n",
+ " \n",
+ " | 159 | \n",
+ " Revenge of the Sith | \n",
+ " Raymus Antilles | \n",
+ " Human | \n",
+ "
\n",
+ " \n",
+ " | 160 | \n",
+ " Revenge of the Sith | \n",
+ " Sly Moore | \n",
+ " Human | \n",
+ "
\n",
+ " \n",
+ " | 161 | \n",
+ " Revenge of the Sith | \n",
+ " Tion Medon | \n",
+ " Pau'an | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
162 rows × 3 columns
\n",
+ "
"
+ ],
+ "text/plain": [
+ " film_title character_name species_name\n",
+ "0 A New Hope Luke Skywalker Human\n",
+ "1 A New Hope C-3PO Droid\n",
+ "2 A New Hope R2-D2 Droid\n",
+ "3 A New Hope Darth Vader Human\n",
+ "4 A New Hope Leia Organa Human\n",
+ ".. ... ... ...\n",
+ "157 Revenge of the Sith Grievous Kaleesh\n",
+ "158 Revenge of the Sith Tarfful Wookie\n",
+ "159 Revenge of the Sith Raymus Antilles Human\n",
+ "160 Revenge of the Sith Sly Moore Human\n",
+ "161 Revenge of the Sith Tion Medon Pau'an\n",
+ "\n",
+ "[162 rows x 3 columns]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
"source": [
import requests
import pandas as pd

# GraphQL fetches films, characters and species in ONE round trip,
# instead of the ~90 REST calls the earlier cells needed.
query = """
{
  allFilms {
    edges {
      node {
        id
        title
        characterConnection {
          characters {
            name
            species {
              name
            }
          }
        }
      }
    }
  }
}
"""

# Public SWAPI GraphQL endpoint.
url = "https://swapi-graphql.netlify.app/.netlify/functions/index"

# GraphQL queries are sent as a POST with the query in a JSON body.
response = requests.post(url, json={'query': query}, timeout=30)
response.raise_for_status()  # surface HTTP failures before parsing
data = response.json()

# Flatten the nested film -> character -> species structure into rows.
films = []
for film_edge in data['data']['allFilms']['edges']:
    film_node = film_edge['node']
    film_title = film_node['title']
    for character in film_node['characterConnection']['characters']:
        # The GraphQL schema returns a null species for humans, mirroring
        # the REST API's empty species list.
        species_name = character['species']['name'] if character['species'] else "Human"
        films.append({
            "film_title": film_title,
            "character_name": character['name'],
            "species_name": species_name,
        })

# Store the information in a DataFrame and show it.
films_df = pd.DataFrame(films)
display(films_df)
]
},
{
@@ -356,7 +1747,7 @@
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3.9.2 64-bit",
+ "display_name": "Python 3",
"language": "python",
"name": "python3"
},
@@ -370,14 +1761,9 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.2"
+ "version": "3.12.3"
},
- "orig_nbformat": 4,
- "vscode": {
- "interpreter": {
- "hash": "916dbcbb3f70747c44a77c7bcd40155683ae19c65e1c03b4aa3499c5328201f1"
- }
- }
+ "orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
diff --git a/TP2 and 3/README.md b/TP2 and 3/README.md
index f8db5758..6c26004e 100644
--- a/TP2 and 3/README.md
+++ b/TP2 and 3/README.md
@@ -8,7 +8,7 @@
- FastAPI
- - All of the above
+ - __All of the above__
- **Question 2:** _What's the main difference between Django, Flask and FastAPI in terms of performance and speed?_
@@ -16,7 +16,7 @@
- Flask outperforms Django and FastAPI.
- - FastAPI is renowned for its increased speed and performance compared with Django and Flask.
+ - __FastAPI is renowned for its increased speed and performance compared with Django and Flask.__
- Django, Flask and FastAPI have equivalent performance.
@@ -26,13 +26,13 @@
- A breakpoint in the code where the API can be interrupted.
- - A specific URL to which a request can be sent to interact with the API.
+ - __A specific URL to which a request can be sent to interact with the API.__
- A unique identifier assigned to each incoming request.
- **Question 4:** _What are the main HTTP verbs used to define REST API methods?_
- - GET, POST, PUT, PATCH, DELETE
+ - __GET, POST, PUT, PATCH, DELETE__
- SEND, RECEIVE, UPDATE, REMOVE
@@ -46,7 +46,7 @@
- An external library used to speed up API development.
- - Intermediate software that processes the request before it reaches the main application.
+ - __Intermediate software that processes the request before it reaches the main application.__
- A method for securing data stored in the database.
@@ -56,7 +56,7 @@
- PyJSON
- - json.dumps() and json.loads()
+ - __json.dumps() and json.loads()__
- serializeJSON
@@ -64,7 +64,7 @@
- Create a new resource.
- - Update an existing resource, or create one if it doesn't exist.
+ - __Update an existing resource, or create one if it doesn't exist.__
- Delete a resource.
@@ -72,7 +72,7 @@
- **Question 8:** In FastAPI, how do you define an endpoint to handle a POST request with JSON data?\*
- - @app.post("/endpoint")
+ - __@app.post("/endpoint")__
- @app.get("/endpoint")
diff --git a/TP2 and 3/services/epf-flower-data-science/__pycache__/main.cpython-312.pyc b/TP2 and 3/services/epf-flower-data-science/__pycache__/main.cpython-312.pyc
new file mode 100644
index 00000000..3e752cc6
Binary files /dev/null and b/TP2 and 3/services/epf-flower-data-science/__pycache__/main.cpython-312.pyc differ
diff --git a/TP2 and 3/services/epf-flower-data-science/main.py b/TP2 and 3/services/epf-flower-data-science/main.py
index bcdba253..5dfa82bd 100644
--- a/TP2 and 3/services/epf-flower-data-science/main.py
+++ b/TP2 and 3/services/epf-flower-data-science/main.py
@@ -5,4 +5,4 @@
app = get_application()

if __name__ == "__main__":
    # `debug` is not a valid uvicorn.run() argument in current uvicorn
    # releases (passing it raises TypeError); `reload=True` is the
    # development-mode switch.
    uvicorn.run("main:app", host="127.0.0.1", port=8080, reload=True)
diff --git a/TP2 and 3/services/epf-flower-data-science/src/__pycache__/app.cpython-312.pyc b/TP2 and 3/services/epf-flower-data-science/src/__pycache__/app.cpython-312.pyc
new file mode 100644
index 00000000..890beeb2
Binary files /dev/null and b/TP2 and 3/services/epf-flower-data-science/src/__pycache__/app.cpython-312.pyc differ
diff --git a/TP2 and 3/services/epf-flower-data-science/src/api/__pycache__/router.cpython-312.pyc b/TP2 and 3/services/epf-flower-data-science/src/api/__pycache__/router.cpython-312.pyc
new file mode 100644
index 00000000..b099a7aa
Binary files /dev/null and b/TP2 and 3/services/epf-flower-data-science/src/api/__pycache__/router.cpython-312.pyc differ
diff --git a/TP2 and 3/services/epf-flower-data-science/src/api/router.py b/TP2 and 3/services/epf-flower-data-science/src/api/router.py
index 15529962..f906f061 100644
--- a/TP2 and 3/services/epf-flower-data-science/src/api/router.py
+++ b/TP2 and 3/services/epf-flower-data-science/src/api/router.py
@@ -1,8 +1,17 @@
"""API Router for Fast API."""
from fastapi import APIRouter
from fastapi.responses import RedirectResponse

from src.api.routes import hello, data

router = APIRouter()
router.include_router(hello.router, tags=["Hello"])
router.include_router(data.router, tags=["Data"])


@router.get("/", include_in_schema=False)
async def root():
    """Redirect the bare root path to the interactive Swagger docs.

    NOTE(review): the previous version also built a second ``FastAPI()``
    app here; the application instance is created by ``get_application()``
    and served from ``main.py``, so that duplicate was never served and
    has been removed.
    """
    return RedirectResponse(url="/docs")
\ No newline at end of file
diff --git a/TP2 and 3/services/epf-flower-data-science/src/api/routes/__pycache__/data.cpython-312.pyc b/TP2 and 3/services/epf-flower-data-science/src/api/routes/__pycache__/data.cpython-312.pyc
new file mode 100644
index 00000000..aafaf8d7
Binary files /dev/null and b/TP2 and 3/services/epf-flower-data-science/src/api/routes/__pycache__/data.cpython-312.pyc differ
diff --git a/TP2 and 3/services/epf-flower-data-science/src/api/routes/__pycache__/hello.cpython-312.pyc b/TP2 and 3/services/epf-flower-data-science/src/api/routes/__pycache__/hello.cpython-312.pyc
new file mode 100644
index 00000000..9499bff9
Binary files /dev/null and b/TP2 and 3/services/epf-flower-data-science/src/api/routes/__pycache__/hello.cpython-312.pyc differ
diff --git a/TP2 and 3/services/epf-flower-data-science/src/api/routes/data.py b/TP2 and 3/services/epf-flower-data-science/src/api/routes/data.py
index e69de29b..6605ae2c 100644
--- a/TP2 and 3/services/epf-flower-data-science/src/api/routes/data.py
+++ b/TP2 and 3/services/epf-flower-data-science/src/api/routes/data.py
@@ -0,0 +1,252 @@
"""Dataset-management routes: config CRUD plus iris load/process/split."""
import os
import json

from fastapi import APIRouter, HTTPException
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel
import opendatasets as od
import pandas as pd

from src.services.data import load_iris_dataset, process_iris_dataset, split_iris_dataset

router = APIRouter()

# Resolved once at import time; every endpoint works on the same file.
CONFIG_PATH = os.path.join(os.path.dirname(__file__), '../../config/config.json')


class DatasetInfo(BaseModel):
    """Payload for registering or updating a dataset."""
    # Key under which the dataset is stored in config.json.
    name: str
    # Kaggle URL understood by opendatasets.
    url: str


def _read_config() -> dict:
    """Load and parse config.json, mapping failures to HTTP errors.

    Raises:
        HTTPException: 404 if the file is missing, 400 if it is not valid JSON.
    """
    if not os.path.exists(CONFIG_PATH):
        raise HTTPException(status_code=404, detail="Config file not found")
    try:
        with open(CONFIG_PATH) as config_file:
            return json.load(config_file)
    except json.JSONDecodeError:
        raise HTTPException(status_code=400, detail="Error decoding JSON file")


def _write_config(config: dict) -> None:
    """Persist *config* back to config.json (pretty-printed).

    Raises:
        HTTPException: 500 if the file cannot be written.
    """
    try:
        with open(CONFIG_PATH, 'w') as config_file:
            json.dump(config, config_file, indent=4)
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error writing to config file: {str(e)}")


@router.get("/download-dataset")
async def download_dataset():
    """Download the iris dataset declared in config.json into src/data."""
    config = _read_config()
    if "iris" not in config or "url" not in config["iris"]:
        raise HTTPException(status_code=400, detail="Dataset URL not found in config file")

    data_dir = os.path.join(os.path.dirname(__file__), '../../data')
    os.makedirs(data_dir, exist_ok=True)

    try:
        # opendatasets handles Kaggle authentication and the actual download.
        od.download(config["iris"]["url"], data_dir)
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error downloading dataset: {str(e)}")

    return {"message": "Dataset downloaded successfully"}


@router.get("/get-dataset-info")
async def get_dataset_info():
    """Return the iris entry of config.json."""
    config = _read_config()
    if "iris" not in config:
        raise HTTPException(status_code=400, detail="Dataset information not found in config file")
    return config["iris"]


@router.post("/add-dataset")
async def add_dataset(dataset: DatasetInfo):
    """Register a dataset in config.json (overwrites an existing entry of the same name)."""
    config = _read_config()
    config[dataset.name] = {"name": dataset.name, "url": dataset.url}
    _write_config(config)
    return {"message": "Dataset added successfully"}


@router.put("/update-dataset")
async def update_dataset(dataset: DatasetInfo):
    """Update an existing dataset entry; 404 if the name is unknown."""
    config = _read_config()
    if dataset.name not in config:
        raise HTTPException(status_code=404, detail="Dataset not found in config file")
    config[dataset.name] = {"name": dataset.name, "url": dataset.url}
    _write_config(config)
    return {"message": "Dataset updated successfully"}


@router.get("/load-iris-dataset")
async def load_iris_dataset_endpoint():
    """Return the raw iris dataset as pandas 'split'-oriented JSON."""
    df = load_iris_dataset()
    return df.to_json(orient='split')


@router.get("/process-iris-dataset")
async def process_iris_dataset_endpoint():
    """Return the processed iris dataset, JSON-encoded for transport."""
    df_processed = process_iris_dataset(load_iris_dataset())
    # jsonable_encoder handles numpy/pandas scalar types in the dict.
    return jsonable_encoder(df_processed.to_dict(orient='split'))


@router.get("/split-iris-dataset")
async def split_iris_dataset_endpoint():
    """Return the train/test split of the iris dataset."""
    return split_iris_dataset(load_iris_dataset())
\ No newline at end of file
diff --git a/TP2 and 3/services/epf-flower-data-science/src/config/config.json b/TP2 and 3/services/epf-flower-data-science/src/config/config.json
new file mode 100644
index 00000000..c047cc0e
--- /dev/null
+++ b/TP2 and 3/services/epf-flower-data-science/src/config/config.json
@@ -0,0 +1,10 @@
+{
+ "iris": {
+ "name": "iris",
+ "url": "https://www.kaggle.com/datasets/uciml/updated-iris"
+ },
+ "string": {
+ "name": "string",
+ "url": "https://kaggle.blabla"
+ }
+}
\ No newline at end of file
diff --git a/TP2 and 3/services/epf-flower-data-science/src/data/iris/Iris.csv b/TP2 and 3/services/epf-flower-data-science/src/data/iris/Iris.csv
new file mode 100644
index 00000000..1bf42f25
--- /dev/null
+++ b/TP2 and 3/services/epf-flower-data-science/src/data/iris/Iris.csv
@@ -0,0 +1,151 @@
+Id,SepalLengthCm,SepalWidthCm,PetalLengthCm,PetalWidthCm,Species
+1,5.1,3.5,1.4,0.2,Iris-setosa
+2,4.9,3.0,1.4,0.2,Iris-setosa
+3,4.7,3.2,1.3,0.2,Iris-setosa
+4,4.6,3.1,1.5,0.2,Iris-setosa
+5,5.0,3.6,1.4,0.2,Iris-setosa
+6,5.4,3.9,1.7,0.4,Iris-setosa
+7,4.6,3.4,1.4,0.3,Iris-setosa
+8,5.0,3.4,1.5,0.2,Iris-setosa
+9,4.4,2.9,1.4,0.2,Iris-setosa
+10,4.9,3.1,1.5,0.1,Iris-setosa
+11,5.4,3.7,1.5,0.2,Iris-setosa
+12,4.8,3.4,1.6,0.2,Iris-setosa
+13,4.8,3.0,1.4,0.1,Iris-setosa
+14,4.3,3.0,1.1,0.1,Iris-setosa
+15,5.8,4.0,1.2,0.2,Iris-setosa
+16,5.7,4.4,1.5,0.4,Iris-setosa
+17,5.4,3.9,1.3,0.4,Iris-setosa
+18,5.1,3.5,1.4,0.3,Iris-setosa
+19,5.7,3.8,1.7,0.3,Iris-setosa
+20,5.1,3.8,1.5,0.3,Iris-setosa
+21,5.4,3.4,1.7,0.2,Iris-setosa
+22,5.1,3.7,1.5,0.4,Iris-setosa
+23,4.6,3.6,1.0,0.2,Iris-setosa
+24,5.1,3.3,1.7,0.5,Iris-setosa
+25,4.8,3.4,1.9,0.2,Iris-setosa
+26,5.0,3.0,1.6,0.2,Iris-setosa
+27,5.0,3.4,1.6,0.4,Iris-setosa
+28,5.2,3.5,1.5,0.2,Iris-setosa
+29,5.2,3.4,1.4,0.2,Iris-setosa
+30,4.7,3.2,1.6,0.2,Iris-setosa
+31,4.8,3.1,1.6,0.2,Iris-setosa
+32,5.4,3.4,1.5,0.4,Iris-setosa
+33,5.2,4.1,1.5,0.1,Iris-setosa
+34,5.5,4.2,1.4,0.2,Iris-setosa
+35,4.9,3.1,1.5,0.1,Iris-setosa
+36,5.0,3.2,1.2,0.2,Iris-setosa
+37,5.5,3.5,1.3,0.2,Iris-setosa
+38,4.9,3.1,1.5,0.1,Iris-setosa
+39,4.4,3.0,1.3,0.2,Iris-setosa
+40,5.1,3.4,1.5,0.2,Iris-setosa
+41,5.0,3.5,1.3,0.3,Iris-setosa
+42,4.5,2.3,1.3,0.3,Iris-setosa
+43,4.4,3.2,1.3,0.2,Iris-setosa
+44,5.0,3.5,1.6,0.6,Iris-setosa
+45,5.1,3.8,1.9,0.4,Iris-setosa
+46,4.8,3.0,1.4,0.3,Iris-setosa
+47,5.1,3.8,1.6,0.2,Iris-setosa
+48,4.6,3.2,1.4,0.2,Iris-setosa
+49,5.3,3.7,1.5,0.2,Iris-setosa
+50,5.0,3.3,1.4,0.2,Iris-setosa
+51,7.0,3.2,4.7,1.4,Iris-versicolor
+52,6.4,3.2,4.5,1.5,Iris-versicolor
+53,6.9,3.1,4.9,1.5,Iris-versicolor
+54,5.5,2.3,4.0,1.3,Iris-versicolor
+55,6.5,2.8,4.6,1.5,Iris-versicolor
+56,5.7,2.8,4.5,1.3,Iris-versicolor
+57,6.3,3.3,4.7,1.6,Iris-versicolor
+58,4.9,2.4,3.3,1.0,Iris-versicolor
+59,6.6,2.9,4.6,1.3,Iris-versicolor
+60,5.2,2.7,3.9,1.4,Iris-versicolor
+61,5.0,2.0,3.5,1.0,Iris-versicolor
+62,5.9,3.0,4.2,1.5,Iris-versicolor
+63,6.0,2.2,4.0,1.0,Iris-versicolor
+64,6.1,2.9,4.7,1.4,Iris-versicolor
+65,5.6,2.9,3.6,1.3,Iris-versicolor
+66,6.7,3.1,4.4,1.4,Iris-versicolor
+67,5.6,3.0,4.5,1.5,Iris-versicolor
+68,5.8,2.7,4.1,1.0,Iris-versicolor
+69,6.2,2.2,4.5,1.5,Iris-versicolor
+70,5.6,2.5,3.9,1.1,Iris-versicolor
+71,5.9,3.2,4.8,1.8,Iris-versicolor
+72,6.1,2.8,4.0,1.3,Iris-versicolor
+73,6.3,2.5,4.9,1.5,Iris-versicolor
+74,6.1,2.8,4.7,1.2,Iris-versicolor
+75,6.4,2.9,4.3,1.3,Iris-versicolor
+76,6.6,3.0,4.4,1.4,Iris-versicolor
+77,6.8,2.8,4.8,1.4,Iris-versicolor
+78,6.7,3.0,5.0,1.7,Iris-versicolor
+79,6.0,2.9,4.5,1.5,Iris-versicolor
+80,5.7,2.6,3.5,1.0,Iris-versicolor
+81,5.5,2.4,3.8,1.1,Iris-versicolor
+82,5.5,2.4,3.7,1.0,Iris-versicolor
+83,5.8,2.7,3.9,1.2,Iris-versicolor
+84,6.0,2.7,5.1,1.6,Iris-versicolor
+85,5.4,3.0,4.5,1.5,Iris-versicolor
+86,6.0,3.4,4.5,1.6,Iris-versicolor
+87,6.7,3.1,4.7,1.5,Iris-versicolor
+88,6.3,2.3,4.4,1.3,Iris-versicolor
+89,5.6,3.0,4.1,1.3,Iris-versicolor
+90,5.5,2.5,4.0,1.3,Iris-versicolor
+91,5.5,2.6,4.4,1.2,Iris-versicolor
+92,6.1,3.0,4.6,1.4,Iris-versicolor
+93,5.8,2.6,4.0,1.2,Iris-versicolor
+94,5.0,2.3,3.3,1.0,Iris-versicolor
+95,5.6,2.7,4.2,1.3,Iris-versicolor
+96,5.7,3.0,4.2,1.2,Iris-versicolor
+97,5.7,2.9,4.2,1.3,Iris-versicolor
+98,6.2,2.9,4.3,1.3,Iris-versicolor
+99,5.1,2.5,3.0,1.1,Iris-versicolor
+100,5.7,2.8,4.1,1.3,Iris-versicolor
+101,6.3,3.3,6.0,2.5,Iris-virginica
+102,5.8,2.7,5.1,1.9,Iris-virginica
+103,7.1,3.0,5.9,2.1,Iris-virginica
+104,6.3,2.9,5.6,1.8,Iris-virginica
+105,6.5,3.0,5.8,2.2,Iris-virginica
+106,7.6,3.0,6.6,2.1,Iris-virginica
+107,4.9,2.5,4.5,1.7,Iris-virginica
+108,7.3,2.9,6.3,1.8,Iris-virginica
+109,6.7,2.5,5.8,1.8,Iris-virginica
+110,7.2,3.6,6.1,2.5,Iris-virginica
+111,6.5,3.2,5.1,2.0,Iris-virginica
+112,6.4,2.7,5.3,1.9,Iris-virginica
+113,6.8,3.0,5.5,2.1,Iris-virginica
+114,5.7,2.5,5.0,2.0,Iris-virginica
+115,5.8,2.8,5.1,2.4,Iris-virginica
+116,6.4,3.2,5.3,2.3,Iris-virginica
+117,6.5,3.0,5.5,1.8,Iris-virginica
+118,7.7,3.8,6.7,2.2,Iris-virginica
+119,7.7,2.6,6.9,2.3,Iris-virginica
+120,6.0,2.2,5.0,1.5,Iris-virginica
+121,6.9,3.2,5.7,2.3,Iris-virginica
+122,5.6,2.8,4.9,2.0,Iris-virginica
+123,7.7,2.8,6.7,2.0,Iris-virginica
+124,6.3,2.7,4.9,1.8,Iris-virginica
+125,6.7,3.3,5.7,2.1,Iris-virginica
+126,7.2,3.2,6.0,1.8,Iris-virginica
+127,6.2,2.8,4.8,1.8,Iris-virginica
+128,6.1,3.0,4.9,1.8,Iris-virginica
+129,6.4,2.8,5.6,2.1,Iris-virginica
+130,7.2,3.0,5.8,1.6,Iris-virginica
+131,7.4,2.8,6.1,1.9,Iris-virginica
+132,7.9,3.8,6.4,2.0,Iris-virginica
+133,6.4,2.8,5.6,2.2,Iris-virginica
+134,6.3,2.8,5.1,1.5,Iris-virginica
+135,6.1,2.6,5.6,1.4,Iris-virginica
+136,7.7,3.0,6.1,2.3,Iris-virginica
+137,6.3,3.4,5.6,2.4,Iris-virginica
+138,6.4,3.1,5.5,1.8,Iris-virginica
+139,6.0,3.0,4.8,1.8,Iris-virginica
+140,6.9,3.1,5.4,2.1,Iris-virginica
+141,6.7,3.1,5.6,2.4,Iris-virginica
+142,6.9,3.1,5.1,2.3,Iris-virginica
+143,5.8,2.7,5.1,1.9,Iris-virginica
+144,6.8,3.2,5.9,2.3,Iris-virginica
+145,6.7,3.3,5.7,2.5,Iris-virginica
+146,6.7,3.0,5.2,2.3,Iris-virginica
+147,6.3,2.5,5.0,1.9,Iris-virginica
+148,6.5,3.0,5.2,2.0,Iris-virginica
+149,6.2,3.4,5.4,2.3,Iris-virginica
+150,5.9,3.0,5.1,1.8,Iris-virginica
diff --git a/TP2 and 3/services/epf-flower-data-science/src/data/iris/database.sqlite b/TP2 and 3/services/epf-flower-data-science/src/data/iris/database.sqlite
new file mode 100644
index 00000000..1679fb0c
Binary files /dev/null and b/TP2 and 3/services/epf-flower-data-science/src/data/iris/database.sqlite differ
diff --git a/TP2 and 3/services/epf-flower-data-science/src/schemas/__pycache__/camelcase.cpython-312.pyc b/TP2 and 3/services/epf-flower-data-science/src/schemas/__pycache__/camelcase.cpython-312.pyc
new file mode 100644
index 00000000..a5e4fb25
Binary files /dev/null and b/TP2 and 3/services/epf-flower-data-science/src/schemas/__pycache__/camelcase.cpython-312.pyc differ
diff --git a/TP2 and 3/services/epf-flower-data-science/src/schemas/__pycache__/message.cpython-312.pyc b/TP2 and 3/services/epf-flower-data-science/src/schemas/__pycache__/message.cpython-312.pyc
new file mode 100644
index 00000000..f0a11464
Binary files /dev/null and b/TP2 and 3/services/epf-flower-data-science/src/schemas/__pycache__/message.cpython-312.pyc differ
diff --git a/TP2 and 3/services/epf-flower-data-science/src/services/__pycache__/data.cpython-312.pyc b/TP2 and 3/services/epf-flower-data-science/src/services/__pycache__/data.cpython-312.pyc
new file mode 100644
index 00000000..e6a55397
Binary files /dev/null and b/TP2 and 3/services/epf-flower-data-science/src/services/__pycache__/data.cpython-312.pyc differ
diff --git a/TP2 and 3/services/epf-flower-data-science/src/services/data.py b/TP2 and 3/services/epf-flower-data-science/src/services/data.py
index e69de29b..85d4c697 100644
--- a/TP2 and 3/services/epf-flower-data-science/src/services/data.py
+++ b/TP2 and 3/services/epf-flower-data-science/src/services/data.py
@@ -0,0 +1,47 @@
+import os
+import pandas as pd
+from fastapi import HTTPException
+from sklearn.model_selection import train_test_split
+
+
+def load_iris_dataset() -> pd.DataFrame:
+ data_dir = os.path.join(os.path.dirname(__file__), '../data/iris')
+ dataset_path = os.path.join(data_dir, 'Iris.csv')
+
+ # Vérifie si le fichier du dataset existe
+ if not os.path.exists(dataset_path):
+ raise HTTPException(status_code=404, detail="Dataset file not found")
+
+ # Charge le dataset
+ try:
+ df = pd.read_csv(dataset_path)
+ except Exception as e:
+ raise HTTPException(status_code=500, detail=f"Error loading dataset: {str(e)}")
+
+ return df
+
+def process_iris_dataset(df: pd.DataFrame) -> pd.DataFrame:
+ # Renommer les colonnes contenant "iris"
+ try:
+ df_processed = df.rename(columns=lambda x: x.replace('iris-', ''))
+ # Enlever "iris-" dans les noms des espèces
+ df_processed['Species'] = df_processed['Species'].str.replace('iris-', '', case=False)
+ except Exception as e:
+ raise HTTPException(status_code=500, detail=f"Error processing dataset: {str(e)}")
+
+ return df_processed
+
+def split_iris_dataset(df: pd.DataFrame) -> dict:
+ try:
+ X = df.drop(columns=['Species'])
+ y = df['Species']
+ X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
+ except Exception as e:
+ raise HTTPException(status_code=500, detail=f"Error processing dataset: {str(e)}")
+
+ return {
+ "X_train": X_train.to_json(orient='split'),
+ "X_test": X_test.to_json(orient='split'),
+ "y_train": y_train.to_json(orient='split'),
+ "y_test": y_test.to_json(orient='split')
+ }
\ No newline at end of file
diff --git a/TP2 and 3/services/epf-flower-data-science/tests/unit/api/routes/test_add_config.py b/TP2 and 3/services/epf-flower-data-science/tests/unit/api/routes/test_add_config.py
new file mode 100644
index 00000000..50c18bd3
--- /dev/null
+++ b/TP2 and 3/services/epf-flower-data-science/tests/unit/api/routes/test_add_config.py
@@ -0,0 +1,74 @@
+import os
+import json
+import pytest
+from fastapi.testclient import TestClient
+from unittest.mock import patch, mock_open
+from src.api.routes.data import router, DatasetInfo
+from src.app import get_application
+
+app = get_application()
+app.include_router(router, prefix="/data")
+
+client = TestClient(app)
+
+@pytest.fixture
+def mock_config_file():
+ config_data = {
+ "iris": {
+ "name": "iris",
+ "url": "https://www.kaggle.com/datasets/uciml/iris"
+ }
+ }
+ with patch("builtins.open", mock_open(read_data=json.dumps(config_data))):
+ yield
+
+def test_add_dataset_success(mock_config_file):
+ new_dataset = {
+ "name": "air_france_reviews",
+ "url": "https://www.kaggle.com/api/v1/datasets/download/saharnazyaghoobpoor/air-france-reviews-dataset"
+ }
+ with patch("builtins.open", mock_open()) as mocked_file:
+ response = client.post("/data/add-dataset", json=new_dataset)
+ assert response.status_code == 200
+ assert response.json() == {"message": "Dataset added successfully"}
+
+ # Vérifiez que le fichier a été ouvert en mode écriture
+ mocked_file.assert_called_with(os.path.join(os.path.dirname(__file__), '../../config/config.json'), 'w')
+
+ # Vérifiez que les données ont été écrites dans le fichier
+ handle = mocked_file()
+ handle.write.assert_called_once()
+ written_data = json.loads(handle.write.call_args[0][0])
+ assert new_dataset["name"] in written_data
+ assert written_data[new_dataset["name"]] == new_dataset
+
+def test_config_file_not_found():
+ with patch("os.path.exists", return_value=False):
+ new_dataset = {
+ "name": "air_france_reviews",
+ "url": "https://www.kaggle.com/api/v1/datasets/download/saharnazyaghoobpoor/air-france-reviews-dataset"
+ }
+ response = client.post("/data/add-dataset", json=new_dataset)
+ assert response.status_code == 404
+ assert response.json() == {"detail": "Config file not found"}
+
+def test_error_decoding_json():
+ with patch("builtins.open", mock_open(read_data="invalid json")):
+ new_dataset = {
+ "name": "air_france_reviews",
+ "url": "https://www.kaggle.com/api/v1/datasets/download/saharnazyaghoobpoor/air-france-reviews-dataset"
+ }
+ response = client.post("/data/add-dataset", json=new_dataset)
+ assert response.status_code == 400
+ assert response.json() == {"detail": "Error decoding JSON file"}
+
+def test_error_writing_to_config_file(mock_config_file):
+ new_dataset = {
+ "name": "air_france_reviews",
+ "url": "https://www.kaggle.com/api/v1/datasets/download/saharnazyaghoobpoor/air-france-reviews-dataset"
+ }
+ with patch("builtins.open", mock_open()) as mocked_file:
+ mocked_file.side_effect = Exception("Write error")
+ response = client.post("/data/add-dataset", json=new_dataset)
+ assert response.status_code == 500
+ assert response.json() == {"detail": "Error writing to config file: Write error"}
\ No newline at end of file
diff --git a/TP2 and 3/services/epf-flower-data-science/tests/unit/api/routes/test_data.py b/TP2 and 3/services/epf-flower-data-science/tests/unit/api/routes/test_data.py
new file mode 100644
index 00000000..6e1f874d
--- /dev/null
+++ b/TP2 and 3/services/epf-flower-data-science/tests/unit/api/routes/test_data.py
@@ -0,0 +1,78 @@
+import os
+import json
+import pytest
+from fastapi.testclient import TestClient
+from unittest.mock import patch, mock_open
+from src.api.routes.data import router
+from src.services.data import load_iris_dataset, process_iris_dataset, split_iris_dataset
+from src.app import get_application
+
+app = get_application()
+app.include_router(router, prefix="/data")
+
+client = TestClient(app)
+
+@pytest.fixture
+def mock_config_file():
+ config_data = {
+ "iris": {
+ "name": "iris",
+ "url": "https://www.kaggle.com/datasets/uciml/iris"
+ }
+ }
+ with patch("builtins.open", mock_open(read_data=json.dumps(config_data))):
+ yield
+
+@pytest.fixture
+def mock_dataset_file():
+ dataset_data = """sepal_length,sepal_width,petal_length,petal_width,Species
+5.1,3.5,1.4,0.2,setosa
+4.9,3.0,1.4,0.2,setosa
+4.7,3.2,1.3,0.2,setosa
+"""
+ with patch("builtins.open", mock_open(read_data=dataset_data)):
+ yield
+
+def test_load_iris_dataset_success(mock_dataset_file):
+ df = load_iris_dataset()
+ assert not df.empty
+ assert "Species" in df.columns
+
+def test_process_iris_dataset_success(mock_dataset_file):
+ df = load_iris_dataset()
+ df_processed = process_iris_dataset(df)
+ assert not df_processed.empty
+ assert "Species" in df_processed.columns
+ assert all("iris" not in col for col in df_processed.columns)
+
+def test_split_iris_dataset_success(mock_dataset_file):
+ df = load_iris_dataset()
+ split_data = split_iris_dataset(df)
+ assert "X_train" in split_data
+ assert "X_test" in split_data
+ assert "y_train" in split_data
+ assert "y_test" in split_data
+
+def test_load_iris_dataset_endpoint(mock_dataset_file):
+ response = client.get("/data/load-iris-dataset")
+ assert response.status_code == 200
+ data = response.json()
+ assert "data" in data
+ assert "columns" in data
+
+def test_process_iris_dataset_endpoint(mock_dataset_file):
+ response = client.get("/data/process-iris-dataset")
+ assert response.status_code == 200
+ data = response.json()
+ assert "data" in data
+ assert "columns" in data
+ assert all("iris" not in col for col in data["columns"])
+
+def test_split_iris_dataset_endpoint(mock_dataset_file):
+ response = client.get("/data/split-iris-dataset")
+ assert response.status_code == 200
+ data = response.json()
+ assert "X_train" in data
+ assert "X_test" in data
+ assert "y_train" in data
+ assert "y_test" in data
\ No newline at end of file
diff --git a/TP2 and 3/services/epf-flower-data-science/tests/unit/api/routes/test_load_dataset.py b/TP2 and 3/services/epf-flower-data-science/tests/unit/api/routes/test_load_dataset.py
new file mode 100644
index 00000000..d1e21442
--- /dev/null
+++ b/TP2 and 3/services/epf-flower-data-science/tests/unit/api/routes/test_load_dataset.py
@@ -0,0 +1,63 @@
+import os
+import json
+import pytest
+from fastapi.testclient import TestClient
+from unittest.mock import patch, mock_open
+from src.api.routes.data import router
+from src.app import get_application
+
+app = get_application()
+app.include_router(router, prefix="/data")
+
+client = TestClient(app)
+
+@pytest.fixture
+def mock_config_file():
+ config_data = {
+ "iris": {
+ "name": "iris",
+ "url": "https://www.kaggle.com/datasets/uciml/iris"
+ }
+ }
+ with patch("builtins.open", mock_open(read_data=json.dumps(config_data))):
+ yield
+
+@pytest.fixture
+def mock_download():
+ with patch("opendatasets.download") as mock_download:
+ yield mock_download
+
+def test_download_dataset_success(mock_config_file, mock_download):
+ response = client.get("/data/download-dataset")
+ assert response.status_code == 200
+ assert response.json() == {"message": "Dataset downloaded successfully"}
+ mock_download.assert_called_once_with("https://www.kaggle.com/datasets/uciml/iris", os.path.join(os.path.dirname(__file__), '../../data'))
+
+def test_config_file_not_found():
+ with patch("os.path.exists", return_value=False):
+ response = client.get("/data/download-dataset")
+ assert response.status_code == 404
+ assert response.json() == {"detail": "Config file not found"}
+
+def test_error_decoding_json():
+ with patch("builtins.open", mock_open(read_data="invalid json")):
+ response = client.get("/data/download-dataset")
+ assert response.status_code == 400
+ assert response.json() == {"detail": "Error decoding JSON file"}
+
+def test_dataset_url_not_found(mock_config_file):
+ invalid_config_data = {
+ "iris": {
+ "name": "iris"
+ }
+ }
+ with patch("builtins.open", mock_open(read_data=json.dumps(invalid_config_data))):
+ response = client.get("/data/download-dataset")
+ assert response.status_code == 400
+ assert response.json() == {"detail": "Dataset URL not found in config file"}
+
+def test_error_downloading_dataset(mock_config_file):
+ with patch("opendatasets.download", side_effect=Exception("Download error")):
+ response = client.get("/data/download-dataset")
+ assert response.status_code == 500
+ assert response.json() == {"detail": "Error downloading dataset: Download error"}
\ No newline at end of file
diff --git a/TP2 and 3/services/epf-flower-data-science/tests/unit/api/routes/test_update_config.py b/TP2 and 3/services/epf-flower-data-science/tests/unit/api/routes/test_update_config.py
new file mode 100644
index 00000000..2466884d
--- /dev/null
+++ b/TP2 and 3/services/epf-flower-data-science/tests/unit/api/routes/test_update_config.py
@@ -0,0 +1,83 @@
+import os
+import json
+import pytest
+from fastapi.testclient import TestClient
+from unittest.mock import patch, mock_open
+from src.api.routes.data import router, DatasetInfo
+from src.app import get_application
+
+app = get_application()
+app.include_router(router, prefix="/data")
+
+client = TestClient(app)
+
+@pytest.fixture
+def mock_config_file():
+ config_data = {
+ "iris": {
+ "name": "iris",
+ "url": "https://www.kaggle.com/datasets/uciml/iris"
+ }
+ }
+ with patch("builtins.open", mock_open(read_data=json.dumps(config_data))):
+ yield
+
+def test_update_dataset_success(mock_config_file):
+ updated_dataset = {
+ "name": "iris",
+ "url": "https://www.kaggle.com/datasets/uciml/updated-iris"
+ }
+ with patch("builtins.open", mock_open()) as mocked_file:
+ response = client.put("/data/update-dataset", json=updated_dataset)
+ assert response.status_code == 200
+ assert response.json() == {"message": "Dataset updated successfully"}
+
+ # Vérifiez que le fichier a été ouvert en mode écriture
+ mocked_file.assert_called_with(os.path.join(os.path.dirname(__file__), '../../config/config.json'), 'w')
+
+ # Vérifiez que les données ont été écrites dans le fichier
+ handle = mocked_file()
+ handle.write.assert_called_once()
+ written_data = json.loads(handle.write.call_args[0][0])
+ assert updated_dataset["name"] in written_data
+ assert written_data[updated_dataset["name"]] == updated_dataset
+
+def test_config_file_not_found():
+ with patch("os.path.exists", return_value=False):
+ updated_dataset = {
+ "name": "iris",
+ "url": "https://www.kaggle.com/datasets/uciml/updated-iris"
+ }
+ response = client.put("/data/update-dataset", json=updated_dataset)
+ assert response.status_code == 404
+ assert response.json() == {"detail": "Config file not found"}
+
+def test_error_decoding_json():
+ with patch("builtins.open", mock_open(read_data="invalid json")):
+ updated_dataset = {
+ "name": "iris",
+ "url": "https://www.kaggle.com/datasets/uciml/updated-iris"
+ }
+ response = client.put("/data/update-dataset", json=updated_dataset)
+ assert response.status_code == 400
+ assert response.json() == {"detail": "Error decoding JSON file"}
+
+def test_dataset_not_found(mock_config_file):
+ updated_dataset = {
+ "name": "nonexistent_dataset",
+ "url": "https://www.kaggle.com/datasets/uciml/updated-iris"
+ }
+ response = client.put("/data/update-dataset", json=updated_dataset)
+ assert response.status_code == 404
+ assert response.json() == {"detail": "Dataset not found in config file"}
+
+def test_error_writing_to_config_file(mock_config_file):
+ updated_dataset = {
+ "name": "iris",
+ "url": "https://www.kaggle.com/datasets/uciml/updated-iris"
+ }
+ with patch("builtins.open", mock_open()) as mocked_file:
+ mocked_file.side_effect = Exception("Write error")
+ response = client.put("/data/update-dataset", json=updated_dataset)
+ assert response.status_code == 500
+ assert response.json() == {"detail": "Error writing to config file: Write error"}
\ No newline at end of file