From b7b2d0a5762c091196d2e6ef7a2564e7b14fd397 Mon Sep 17 00:00:00 2001 From: raghav Date: Thu, 9 Nov 2023 02:48:20 +0530 Subject: [PATCH] dockerized the application and fixed bugs --- .dockerignore | 3 + .gitignore | 2 + App/TextInputComponent.js | 3 +- App/package-lock.json | 398 +++++++++++++++++++++++++++++++++++ App/package.json | 1 + Dockerfile | 44 ++-- backend.py | 12 +- build_utils/env.yml | 209 ------------------ build_utils/requirements.txt | 8 + build_utils/run.sh | 9 - build_utils/services.sh | 12 ++ install.sh | 28 +++ medmini.py | 166 ++++++++------- run.sh | 3 + 14 files changed, 583 insertions(+), 315 deletions(-) create mode 100644 .dockerignore create mode 100644 .gitignore delete mode 100644 build_utils/env.yml create mode 100644 build_utils/requirements.txt delete mode 100644 build_utils/run.sh create mode 100644 build_utils/services.sh create mode 100644 install.sh create mode 100644 run.sh diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..26fb440 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,3 @@ +medmini.pdf +README.md +media/ diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..ec32528 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +med_db/ + diff --git a/App/TextInputComponent.js b/App/TextInputComponent.js index adcef75..ca718a2 100644 --- a/App/TextInputComponent.js +++ b/App/TextInputComponent.js @@ -17,7 +17,8 @@ const TextInputComponent = () => { const handleTextSubmit = async () => { try { console.log(myRAM, text); - const url = "http://10.2.133.59:5000/query"; + const url = "http://"+String(process.env.EXPO_PUBLIC_HOSTIP)+":5000/query"; + console.log(url) const headers = { 'Content-Type': 'application/json', }; diff --git a/App/package-lock.json b/App/package-lock.json index ef10634..1898e80 100644 --- a/App/package-lock.json +++ b/App/package-lock.json @@ -15,6 +15,7 @@ "expo-status-bar": "~1.6.0", "react": "18.2.0", "react-native": "0.72.6", + "react-native-animated-ellipsis": "^2.0.0", "react-native-device-info": "^10.11.0" }, "devDependencies": { @@ -6146,6 +6147,71 @@ "node": ">= 6" } }, + "node_modules/babel-code-frame": { + "version": "6.26.0", + "resolved": "https://registry.npmjs.org/babel-code-frame/-/babel-code-frame-6.26.0.tgz", + "integrity": "sha512-XqYMR2dfdGMW+hd0IUZ2PwK+fGeFkOxZJ0wY+JaQAHzt1Zx8LcvpiZD2NiGkEG8qx0CfkAOr5xt76d1e8vG90g==", + "dependencies": { + "chalk": "^1.1.3", + "esutils": "^2.0.2", + "js-tokens": "^3.0.2" + } + }, + "node_modules/babel-code-frame/node_modules/ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/babel-code-frame/node_modules/ansi-styles": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", + "integrity": "sha512-kmCevFghRiWM7HB5zTPULl4r9bVFSWjz62MhqizDGUrq2NWuNMQyuv4tHHoKJHs69M/MF64lEcHdYIocrdWQYA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/babel-code-frame/node_modules/chalk": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "integrity": "sha512-U3lRVLMSlsCfjqYPbLyVv11M9CPW4I728d6TCKMAOJueEeB9/8o+eSsMnxPJD+Q+K909sdESg7C+tIkoH6on1A==", + "dependencies": { + "ansi-styles": "^2.2.1", + "escape-string-regexp": "^1.0.2", + "has-ansi": "^2.0.0", + "strip-ansi": "^3.0.0", + "supports-color": "^2.0.0" + }, + "engines": { 
+ "node": ">=0.10.0" + } + }, + "node_modules/babel-code-frame/node_modules/js-tokens": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-3.0.2.tgz", + "integrity": "sha512-RjTcuD4xjtthQkaWH7dFlH85L+QaVtSoOyGdZ3g6HFhS9dFNDfLyqgm2NFe2X6cQpeFmt0452FJjFG5UameExg==" + }, + "node_modules/babel-code-frame/node_modules/strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg==", + "dependencies": { + "ansi-regex": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/babel-code-frame/node_modules/supports-color": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", + "integrity": "sha512-KKNVtd6pCYgPIKU4cp2733HWYCpplQhddZLBUryaAHou723x+FRzQ5Df824Fj+IyyuiQTRoub4SnIFfIcrp70g==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/babel-core": { "version": "7.0.0-bridge.0", "resolved": "https://registry.npmjs.org/babel-core/-/babel-core-7.0.0-bridge.0.tgz", @@ -6154,6 +6220,88 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/babel-helper-bindify-decorators": { + "version": "6.24.1", + "resolved": "https://registry.npmjs.org/babel-helper-bindify-decorators/-/babel-helper-bindify-decorators-6.24.1.tgz", + "integrity": "sha512-TYX2QQATKA6Wssp6j7jqlw4QLmABDN1olRdEHndYvBXdaXM5dcx6j5rN0+nd+aVL+Th40fAEYvvw/Xxd/LETuQ==", + "dependencies": { + "babel-runtime": "^6.22.0", + "babel-traverse": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "node_modules/babel-helper-builder-binary-assignment-operator-visitor": { + "version": "6.24.1", + "resolved": "https://registry.npmjs.org/babel-helper-builder-binary-assignment-operator-visitor/-/babel-helper-builder-binary-assignment-operator-visitor-6.24.1.tgz", + "integrity": "sha512-gCtfYORSG1fUMX4kKraymq607FWgMWg+j42IFPc18kFQEsmtaibP4UrqsXt8FlEJle25HUd4tsoDR7H2wDhe9Q==", + "dependencies": { + "babel-helper-explode-assignable-expression": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "node_modules/babel-helper-explode-assignable-expression": { + "version": "6.24.1", + "resolved": "https://registry.npmjs.org/babel-helper-explode-assignable-expression/-/babel-helper-explode-assignable-expression-6.24.1.tgz", + "integrity": "sha512-qe5csbhbvq6ccry9G7tkXbzNtcDiH4r51rrPUbwwoTzZ18AqxWYRZT6AOmxrpxKnQBW0pYlBI/8vh73Z//78nQ==", + "dependencies": { + "babel-runtime": "^6.22.0", + "babel-traverse": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "node_modules/babel-helper-explode-class": { + "version": "6.24.1", + "resolved": "https://registry.npmjs.org/babel-helper-explode-class/-/babel-helper-explode-class-6.24.1.tgz", + "integrity": "sha512-SFbWewr0/0U4AiRzsHqwsbOQeLXVa9T1ELdqEa2efcQB5KopTnunAqoj07TuHlN2lfTQNPGO/rJR4FMln5fVcA==", + "dependencies": { + "babel-helper-bindify-decorators": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-traverse": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "node_modules/babel-helper-function-name": { + "version": "6.24.1", + "resolved": "https://registry.npmjs.org/babel-helper-function-name/-/babel-helper-function-name-6.24.1.tgz", + "integrity": "sha512-Oo6+e2iX+o9eVvJ9Y5eKL5iryeRdsIkwRYheCuhYdVHsdEQysbc2z2QkqCLIYnNxkT5Ss3ggrHdXiDI7Dhrn4Q==", + "dependencies": { + "babel-helper-get-function-arity": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1", + "babel-traverse": "^6.24.1", + "babel-types": 
"^6.24.1" + } + }, + "node_modules/babel-helper-get-function-arity": { + "version": "6.24.1", + "resolved": "https://registry.npmjs.org/babel-helper-get-function-arity/-/babel-helper-get-function-arity-6.24.1.tgz", + "integrity": "sha512-WfgKFX6swFB1jS2vo+DwivRN4NB8XUdM3ij0Y1gnC21y1tdBoe6xjVnd7NSI6alv+gZXCtJqvrTeMW3fR/c0ng==", + "dependencies": { + "babel-runtime": "^6.22.0", + "babel-types": "^6.24.1" + } + }, + "node_modules/babel-helper-remap-async-to-generator": { + "version": "6.24.1", + "resolved": "https://registry.npmjs.org/babel-helper-remap-async-to-generator/-/babel-helper-remap-async-to-generator-6.24.1.tgz", + "integrity": "sha512-RYqaPD0mQyQIFRu7Ho5wE2yvA/5jxqCIj/Lv4BXNq23mHYu/vxikOy2JueLiBxQknwapwrJeNCesvY0ZcfnlHg==", + "dependencies": { + "babel-helper-function-name": "^6.24.1", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1", + "babel-traverse": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "node_modules/babel-messages": { + "version": "6.23.0", + "resolved": "https://registry.npmjs.org/babel-messages/-/babel-messages-6.23.0.tgz", + "integrity": "sha512-Bl3ZiA+LjqaMtNYopA9TYE9HP1tQ+E5dLxE0XrAzcIJeK2UqF0/EaqXwBn9esd4UmTfEab+P+UYQ1GnioFIb/w==", + "dependencies": { + "babel-runtime": "^6.22.0" + } + }, "node_modules/babel-plugin-module-resolver": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/babel-plugin-module-resolver/-/babel-plugin-module-resolver-5.0.0.tgz", @@ -6247,11 +6395,99 @@ "resolved": "https://registry.npmjs.org/babel-plugin-react-native-web/-/babel-plugin-react-native-web-0.18.12.tgz", "integrity": "sha512-4djr9G6fMdwQoD6LQ7hOKAm39+y12flWgovAqS1k5O8f42YQ3A1FFMyV5kKfetZuGhZO5BmNmOdRRZQ1TixtDw==" }, + "node_modules/babel-plugin-syntax-async-functions": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/babel-plugin-syntax-async-functions/-/babel-plugin-syntax-async-functions-6.13.0.tgz", + "integrity": "sha512-4Zp4unmHgw30A1eWI5EpACji2qMocisdXhAftfhXoSV9j0Tvj6nRFE3tOmRY912E0FMRm/L5xWE7MGVT2FoLnw==" + }, + "node_modules/babel-plugin-syntax-async-generators": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/babel-plugin-syntax-async-generators/-/babel-plugin-syntax-async-generators-6.13.0.tgz", + "integrity": "sha512-EbciFN5Jb9iqU9bqaLmmFLx2G8pAUsvpWJ6OzOWBNrSY9qTohXj+7YfZx6Ug1Qqh7tCb1EA7Jvn9bMC1HBiucg==" + }, + "node_modules/babel-plugin-syntax-class-properties": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/babel-plugin-syntax-class-properties/-/babel-plugin-syntax-class-properties-6.13.0.tgz", + "integrity": "sha512-chI3Rt9T1AbrQD1s+vxw3KcwC9yHtF621/MacuItITfZX344uhQoANjpoSJZleAmW2tjlolqB/f+h7jIqXa7pA==" + }, + "node_modules/babel-plugin-syntax-decorators": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/babel-plugin-syntax-decorators/-/babel-plugin-syntax-decorators-6.13.0.tgz", + "integrity": "sha512-AWj19x2aDm8qFQ5O2JcD6pwJDW1YdcnO+1b81t7gxrGjz5VHiUqeYWAR4h7zueWMalRelrQDXprv2FrY1dbpbw==" + }, + "node_modules/babel-plugin-syntax-dynamic-import": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/babel-plugin-syntax-dynamic-import/-/babel-plugin-syntax-dynamic-import-6.18.0.tgz", + "integrity": "sha512-MioUE+LfjCEz65Wf7Z/Rm4XCP5k2c+TbMd2Z2JKc7U9uwjBhAfNPE48KC4GTGKhppMeYVepwDBNO/nGY6NYHBA==" + }, + "node_modules/babel-plugin-syntax-exponentiation-operator": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/babel-plugin-syntax-exponentiation-operator/-/babel-plugin-syntax-exponentiation-operator-6.13.0.tgz", + "integrity": 
"sha512-Z/flU+T9ta0aIEKl1tGEmN/pZiI1uXmCiGFRegKacQfEJzp7iNsKloZmyJlQr+75FCJtiFfGIK03SiCvCt9cPQ==" + }, + "node_modules/babel-plugin-syntax-object-rest-spread": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/babel-plugin-syntax-object-rest-spread/-/babel-plugin-syntax-object-rest-spread-6.13.0.tgz", + "integrity": "sha512-C4Aq+GaAj83pRQ0EFgTvw5YO6T3Qz2KGrNRwIj9mSoNHVvdZY4KO2uA6HNtNXCw993iSZnckY1aLW8nOi8i4+w==" + }, "node_modules/babel-plugin-syntax-trailing-function-commas": { "version": "7.0.0-beta.0", "resolved": "https://registry.npmjs.org/babel-plugin-syntax-trailing-function-commas/-/babel-plugin-syntax-trailing-function-commas-7.0.0-beta.0.tgz", "integrity": "sha512-Xj9XuRuz3nTSbaTXWv3itLOcxyF4oPD8douBBmj7U9BBC6nEBYfyOJYQMf/8PJAFotC62UY5dFfIGEPr7WswzQ==" }, + "node_modules/babel-plugin-transform-async-generator-functions": { + "version": "6.24.1", + "resolved": "https://registry.npmjs.org/babel-plugin-transform-async-generator-functions/-/babel-plugin-transform-async-generator-functions-6.24.1.tgz", + "integrity": "sha512-uT7eovUxtXe8Q2ufcjRuJIOL0hg6VAUJhiWJBLxH/evYAw+aqoJLcYTR8hqx13iOx/FfbCMHgBmXWZjukbkyPg==", + "dependencies": { + "babel-helper-remap-async-to-generator": "^6.24.1", + "babel-plugin-syntax-async-generators": "^6.5.0", + "babel-runtime": "^6.22.0" + } + }, + "node_modules/babel-plugin-transform-async-to-generator": { + "version": "6.24.1", + "resolved": "https://registry.npmjs.org/babel-plugin-transform-async-to-generator/-/babel-plugin-transform-async-to-generator-6.24.1.tgz", + "integrity": "sha512-7BgYJujNCg0Ti3x0c/DL3tStvnKS6ktIYOmo9wginv/dfZOrbSZ+qG4IRRHMBOzZ5Awb1skTiAsQXg/+IWkZYw==", + "dependencies": { + "babel-helper-remap-async-to-generator": "^6.24.1", + "babel-plugin-syntax-async-functions": "^6.8.0", + "babel-runtime": "^6.22.0" + } + }, + "node_modules/babel-plugin-transform-class-properties": { + "version": "6.24.1", + "resolved": "https://registry.npmjs.org/babel-plugin-transform-class-properties/-/babel-plugin-transform-class-properties-6.24.1.tgz", + "integrity": "sha512-n4jtBA3OYBdvG5PRMKsMXJXHfLYw/ZOmtxCLOOwz6Ro5XlrColkStLnz1AS1L2yfPA9BKJ1ZNlmVCLjAL9DSIg==", + "dependencies": { + "babel-helper-function-name": "^6.24.1", + "babel-plugin-syntax-class-properties": "^6.8.0", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1" + } + }, + "node_modules/babel-plugin-transform-decorators": { + "version": "6.24.1", + "resolved": "https://registry.npmjs.org/babel-plugin-transform-decorators/-/babel-plugin-transform-decorators-6.24.1.tgz", + "integrity": "sha512-skQ2CImwDkCHu0mkWvCOlBCpBIHW4/49IZWVwV4A/EnWjL9bB6UBvLyMNe3Td5XDStSZNhe69j4bfEW8dvUbew==", + "dependencies": { + "babel-helper-explode-class": "^6.24.1", + "babel-plugin-syntax-decorators": "^6.13.0", + "babel-runtime": "^6.22.0", + "babel-template": "^6.24.1", + "babel-types": "^6.24.1" + } + }, + "node_modules/babel-plugin-transform-exponentiation-operator": { + "version": "6.24.1", + "resolved": "https://registry.npmjs.org/babel-plugin-transform-exponentiation-operator/-/babel-plugin-transform-exponentiation-operator-6.24.1.tgz", + "integrity": "sha512-LzXDmbMkklvNhprr20//RStKVcT8Cu+SQtX18eMHLhjHf2yFzwtQ0S2f0jQ+89rokoNdmwoSqYzAhq86FxlLSQ==", + "dependencies": { + "babel-helper-builder-binary-assignment-operator-visitor": "^6.24.1", + "babel-plugin-syntax-exponentiation-operator": "^6.8.0", + "babel-runtime": "^6.22.0" + } + }, "node_modules/babel-plugin-transform-flow-enums": { "version": "0.0.2", "resolved": 
"https://registry.npmjs.org/babel-plugin-transform-flow-enums/-/babel-plugin-transform-flow-enums-0.0.2.tgz", @@ -6260,6 +6496,15 @@ "@babel/plugin-syntax-flow": "^7.12.1" } }, + "node_modules/babel-plugin-transform-object-rest-spread": { + "version": "6.26.0", + "resolved": "https://registry.npmjs.org/babel-plugin-transform-object-rest-spread/-/babel-plugin-transform-object-rest-spread-6.26.0.tgz", + "integrity": "sha512-ocgA9VJvyxwt+qJB0ncxV8kb/CjfTcECUY4tQ5VT7nP6Aohzobm8CDFaQ5FHdvZQzLmf0sgDxB8iRXZXxwZcyA==", + "dependencies": { + "babel-plugin-syntax-object-rest-spread": "^6.8.0", + "babel-runtime": "^6.26.0" + } + }, "node_modules/babel-preset-expo": { "version": "9.5.2", "resolved": "https://registry.npmjs.org/babel-preset-expo/-/babel-preset-expo-9.5.2.tgz", @@ -6312,6 +6557,124 @@ "@babel/core": "^7.0.0" } }, + "node_modules/babel-preset-stage-2": { + "version": "6.24.1", + "resolved": "https://registry.npmjs.org/babel-preset-stage-2/-/babel-preset-stage-2-6.24.1.tgz", + "integrity": "sha512-9F+nquz+37PrlTSBdpeQBKnQfAMNBnryXw+m4qBh35FNbJPfzZz+sjN2G5Uf1CRedU9PH7fJkTbYijxmkLX8Og==", + "dependencies": { + "babel-plugin-syntax-dynamic-import": "^6.18.0", + "babel-plugin-transform-class-properties": "^6.24.1", + "babel-plugin-transform-decorators": "^6.24.1", + "babel-preset-stage-3": "^6.24.1" + } + }, + "node_modules/babel-preset-stage-3": { + "version": "6.24.1", + "resolved": "https://registry.npmjs.org/babel-preset-stage-3/-/babel-preset-stage-3-6.24.1.tgz", + "integrity": "sha512-eCbEOF8uN0KypFXJmZXn2sTk7bPV9uM5xov7G/7BM08TbQEObsVs0cEWfy6NQySlfk7JBi/t+XJP1JkruYfthA==", + "dependencies": { + "babel-plugin-syntax-trailing-function-commas": "^6.22.0", + "babel-plugin-transform-async-generator-functions": "^6.24.1", + "babel-plugin-transform-async-to-generator": "^6.24.1", + "babel-plugin-transform-exponentiation-operator": "^6.24.1", + "babel-plugin-transform-object-rest-spread": "^6.22.0" + } + }, + "node_modules/babel-preset-stage-3/node_modules/babel-plugin-syntax-trailing-function-commas": { + "version": "6.22.0", + "resolved": "https://registry.npmjs.org/babel-plugin-syntax-trailing-function-commas/-/babel-plugin-syntax-trailing-function-commas-6.22.0.tgz", + "integrity": "sha512-Gx9CH3Q/3GKbhs07Bszw5fPTlU+ygrOGfAhEt7W2JICwufpC4SuO0mG0+4NykPBSYPMJhqvVlDBU17qB1D+hMQ==" + }, + "node_modules/babel-runtime": { + "version": "6.26.0", + "resolved": "https://registry.npmjs.org/babel-runtime/-/babel-runtime-6.26.0.tgz", + "integrity": "sha512-ITKNuq2wKlW1fJg9sSW52eepoYgZBggvOAHC0u/CYu/qxQ9EVzThCgR69BnSXLHjy2f7SY5zaQ4yt7H9ZVxY2g==", + "dependencies": { + "core-js": "^2.4.0", + "regenerator-runtime": "^0.11.0" + } + }, + "node_modules/babel-runtime/node_modules/regenerator-runtime": { + "version": "0.11.1", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz", + "integrity": "sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg==" + }, + "node_modules/babel-template": { + "version": "6.26.0", + "resolved": "https://registry.npmjs.org/babel-template/-/babel-template-6.26.0.tgz", + "integrity": "sha512-PCOcLFW7/eazGUKIoqH97sO9A2UYMahsn/yRQ7uOk37iutwjq7ODtcTNF+iFDSHNfkctqsLRjLP7URnOx0T1fg==", + "dependencies": { + "babel-runtime": "^6.26.0", + "babel-traverse": "^6.26.0", + "babel-types": "^6.26.0", + "babylon": "^6.18.0", + "lodash": "^4.17.4" + } + }, + "node_modules/babel-traverse": { + "version": "6.26.0", + "resolved": "https://registry.npmjs.org/babel-traverse/-/babel-traverse-6.26.0.tgz", + "integrity": 
"sha512-iSxeXx7apsjCHe9c7n8VtRXGzI2Bk1rBSOJgCCjfyXb6v1aCqE1KSEpq/8SXuVN8Ka/Rh1WDTF0MDzkvTA4MIA==", + "dependencies": { + "babel-code-frame": "^6.26.0", + "babel-messages": "^6.23.0", + "babel-runtime": "^6.26.0", + "babel-types": "^6.26.0", + "babylon": "^6.18.0", + "debug": "^2.6.8", + "globals": "^9.18.0", + "invariant": "^2.2.2", + "lodash": "^4.17.4" + } + }, + "node_modules/babel-traverse/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/babel-traverse/node_modules/globals": { + "version": "9.18.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-9.18.0.tgz", + "integrity": "sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/babel-traverse/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/babel-types": { + "version": "6.26.0", + "resolved": "https://registry.npmjs.org/babel-types/-/babel-types-6.26.0.tgz", + "integrity": "sha512-zhe3V/26rCWsEZK8kZN+HaQj5yQ1CilTObixFzKW1UWjqG7618Twz6YEsCnjfg5gBcJh02DrpCkS9h98ZqDY+g==", + "dependencies": { + "babel-runtime": "^6.26.0", + "esutils": "^2.0.2", + "lodash": "^4.17.4", + "to-fast-properties": "^1.0.3" + } + }, + "node_modules/babel-types/node_modules/to-fast-properties": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-1.0.3.tgz", + "integrity": "sha512-lxrWP8ejsq+7E3nNjwYmUBMAgjMTZoTI+sdBOpvNyijeDLa29LUn9QaoXAHv4+Z578hbmHHJKZknzxVtvo77og==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/babylon": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/babylon/-/babylon-6.18.0.tgz", + "integrity": "sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ==", + "bin": { + "babylon": "bin/babylon.js" + } + }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", @@ -6968,6 +7331,13 @@ "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==" }, + "node_modules/core-js": { + "version": "2.6.12", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.12.tgz", + "integrity": "sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==", + "deprecated": "core-js@<3.23.3 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Some versions have web compatibility issues. 
Please, upgrade your dependencies to the actual version of core-js.", + "hasInstallScript": true + }, "node_modules/core-js-compat": { "version": "3.33.1", "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.33.1.tgz", @@ -8129,6 +8499,25 @@ "graphql": "^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" } }, + "node_modules/has-ansi": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "integrity": "sha512-C8vBJ8DwUCx19vhm7urhTuUsr4/IyP6l4VzNQDv+ryHQObW3TTTp9yB68WpYgRe2bbaGuZ/se74IqFeVnMnLZg==", + "dependencies": { + "ansi-regex": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/has-ansi/node_modules/ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -11885,6 +12274,15 @@ "react": "18.2.0" } }, + "node_modules/react-native-animated-ellipsis": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/react-native-animated-ellipsis/-/react-native-animated-ellipsis-2.0.0.tgz", + "integrity": "sha512-a/dyEbJZsbK3ZWJuhnEraOpXb/JjinPWN+CaZzZnsrCX1FTqsatV/k2/qoJlARWYPDqR9YZpI828+pe/b+JVDA==", + "dependencies": { + "babel-preset-stage-2": "^6.24.1", + "prop-types": "^15.5.10" + } + }, "node_modules/react-native-device-info": { "version": "10.11.0", "resolved": "https://registry.npmjs.org/react-native-device-info/-/react-native-device-info-10.11.0.tgz", diff --git a/App/package.json b/App/package.json index d9594a6..6f3cde2 100644 --- a/App/package.json +++ b/App/package.json @@ -16,6 +16,7 @@ "expo-status-bar": "~1.6.0", "react": "18.2.0", "react-native": "0.72.6", + "react-native-animated-ellipsis": "^2.0.0", "react-native-device-info": "^10.11.0" }, "devDependencies": { diff --git a/Dockerfile b/Dockerfile index bb700f7..226edc3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,16 +1,36 @@ -FROM alpine:3.14 +FROM node:18-bookworm-slim -RUN sudo apt install conda -RUN conda env create -f ./build_utils/env.yml +# ARG REACT_NATIVE_PACKAGER_HOSTNAME -COPY ./App -COPY ./backend.py -COPY ./dbGen.py -COPY ./medmini.py -COPY ./mashqa_data -COPY ./build_utils/run.sh +WORKDIR /medmini +COPY . . -RUN conda activate medmini -RUN chmod +x run.sh -RUN run.sh \ No newline at end of file + +RUN mv build_utils/services.sh services.sh \ + && chmod +x services.sh \ + && apt-get update \ + # && apt install curl -y \ + && cd App && npm ci && cd .. 
\ + && apt-get update \ + && apt-get install python3 python3-pip -y \ + && rm -rf /usr/lib/python3.11/EXTERNALLY-MANAGED \ + && pip3 install torch==2.1.0 --index-url https://download.pytorch.org/whl/cpu \ + && pip3 install transformers==4.34.1 langchain==0.0.324 Flask==3.0.0 flask-cors==4.0.0 chromadb==0.4.15 sentence-transformers==2.2.2 sentencepiece==0.1.99 + +# SHELL ["/bin/bash", "--login", "-i", "-c"] +# SHELL ["/bin/bash", "--login", "-c"] + + + + +# EXPOSE 19000 19001 19002 +EXPOSE 8081 5000 + + +CMD ./services.sh + + + +# todo +# use a smaller base image \ No newline at end of file diff --git a/backend.py b/backend.py index f49f57f..0e1f94f 100644 --- a/backend.py +++ b/backend.py @@ -1,10 +1,11 @@ from flask import Flask, request; from flask_cors import CORS, cross_origin -# from pegasus import pegasus;\ -from medmini import infer +from medmini import infer,systemInit app = Flask(__name__) CORS(app) +summarizer=None +vectordb=None @app.route('/', methods=['GET', 'POST']) @cross_origin(supports_credentials=True) @@ -18,7 +19,10 @@ def root(): def x(): # print(request.get_json(force=True)["question"]) # return request.get_json(force=True)["text"] - return infer(request.get_json(force=True)["question"]) + global summarizer + global vectordb + return infer(request.get_json(force=True)["question"],summarizer,vectordb) if __name__ == '__main__': - app.run(host='192.168.196.219', port=5000, debug=True) \ No newline at end of file + summarizer,vectordb=systemInit() + app.run(host="0.0.0.0",port=5000,debug=True) \ No newline at end of file diff --git a/build_utils/env.yml b/build_utils/env.yml deleted file mode 100644 index bec15c6..0000000 --- a/build_utils/env.yml +++ /dev/null @@ -1,209 +0,0 @@ -name: medmini -channels: - - defaults -dependencies: - - _libgcc_mutex=0.1=main - - _openmp_mutex=5.1=1_gnu - - abseil-cpp=20211102.0=hd4dd3e8_0 - - aiohttp=3.8.5=py311h5eee18b_0 - - aiosignal=1.2.0=pyhd3eb1b0_0 - - arrow-cpp=11.0.0=h374c478_2 - - async-timeout=4.0.2=py311h06a4308_0 - - attrs=23.1.0=py311h06a4308_0 - - aws-c-common=0.6.8=h5eee18b_1 - - aws-c-event-stream=0.1.6=h6a678d5_6 - - aws-checksums=0.1.11=h5eee18b_2 - - aws-sdk-cpp=1.8.185=h721c034_1 - - blas=1.0=mkl - - boost-cpp=1.82.0=hdb19cb5_2 - - bottleneck=1.3.5=py311hbed6279_0 - - brotli-python=1.0.9=py311h6a678d5_7 - - bzip2=1.0.8=h7b6447c_0 - - c-ares=1.19.1=h5eee18b_0 - - ca-certificates=2023.08.22=h06a4308_0 - - certifi=2023.7.22=py311h06a4308_0 - - cffi=1.15.1=py311h5eee18b_3 - - charset-normalizer=2.0.4=pyhd3eb1b0_0 - - click=8.1.7=py311h06a4308_0 - - cryptography=41.0.3=py311hdda0065_0 - - datasets=2.12.0=py311h06a4308_0 - - dill=0.3.6=py311h06a4308_0 - - filelock=3.9.0=py311h06a4308_0 - - flask=2.2.2=py311h06a4308_0 - - frozenlist=1.3.3=py311h5eee18b_0 - - fsspec=2023.9.2=py311h06a4308_0 - - gflags=2.2.2=he6710b0_0 - - glog=0.5.0=h2531618_0 - - grpc-cpp=1.48.2=he1ff14a_1 - - huggingface_hub=0.17.3=py311h06a4308_0 - - icu=73.1=h6a678d5_0 - - idna=3.4=py311h06a4308_0 - - importlib-metadata=6.0.0=py311h06a4308_0 - - intel-openmp=2023.1.0=hdb19cb5_46305 - - itsdangerous=2.0.1=pyhd3eb1b0_0 - - jinja2=3.1.2=py311h06a4308_0 - - krb5=1.20.1=h143b758_1 - - ld_impl_linux-64=2.38=h1181459_1 - - libboost=1.82.0=h109eef0_2 - - libbrotlicommon=1.0.9=h5eee18b_7 - - libbrotlidec=1.0.9=h5eee18b_7 - - libbrotlienc=1.0.9=h5eee18b_7 - - libcurl=8.4.0=h251f7ec_0 - - libedit=3.1.20221030=h5eee18b_0 - - libev=4.33=h7f8727e_1 - - libevent=2.1.12=hdbd6064_1 - - libffi=3.4.4=h6a678d5_0 - - libgcc-ng=11.2.0=h1234567_1 - - libgomp=11.2.0=h1234567_1 - 
- libnghttp2=1.57.0=h2d74bed_0 - - libprotobuf=3.20.3=he621ea3_0 - - libssh2=1.10.0=hdbd6064_2 - - libstdcxx-ng=11.2.0=h1234567_1 - - libthrift=0.15.0=h1795dd8_2 - - libuuid=1.41.5=h5eee18b_0 - - lz4-c=1.9.4=h6a678d5_0 - - markupsafe=2.1.1=py311h5eee18b_0 - - mkl=2023.1.0=h213fc3f_46343 - - mkl-service=2.4.0=py311h5eee18b_1 - - mkl_fft=1.3.8=py311h5eee18b_0 - - mkl_random=1.2.4=py311hdb19cb5_0 - - multidict=6.0.2=py311h5eee18b_0 - - multiprocess=0.70.14=py311h06a4308_0 - - ncurses=6.4=h6a678d5_0 - - numexpr=2.8.7=py311h65dcdc2_0 - - numpy=1.26.0=py311h08b1b3b_0 - - numpy-base=1.26.0=py311hf175353_0 - - openssl=3.0.11=h7f8727e_2 - - orc=1.7.4=hb3bc3d3_1 - - packaging=23.1=py311h06a4308_0 - - pandas=2.1.1=py311ha02d727_0 - - pip=23.3=py311h06a4308_0 - - pyarrow=11.0.0=py311hd8e8d9b_1 - - pycparser=2.21=pyhd3eb1b0_0 - - pyopenssl=23.2.0=py311h06a4308_0 - - pysocks=1.7.1=py311h06a4308_0 - - python=3.11.5=h955ad1f_0 - - python-dateutil=2.8.2=pyhd3eb1b0_0 - - python-tzdata=2023.3=pyhd3eb1b0_0 - - python-xxhash=2.0.2=py311h5eee18b_1 - - pytz=2023.3.post1=py311h06a4308_0 - - pyyaml=6.0.1=py311h5eee18b_0 - - re2=2022.04.01=h295c915_0 - - readline=8.2=h5eee18b_0 - - regex=2023.10.3=py311h5eee18b_0 - - requests=2.31.0=py311h06a4308_0 - - responses=0.13.3=pyhd3eb1b0_0 - - safetensors=0.4.0=py311h24d97f6_0 - - setuptools=68.0.0=py311h06a4308_0 - - six=1.16.0=pyhd3eb1b0_1 - - snappy=1.1.9=h295c915_0 - - sqlite=3.41.2=h5eee18b_0 - - tbb=2021.8.0=hdb19cb5_0 - - tk=8.6.12=h1ccaba5_0 - - tokenizers=0.13.3=py311h22610ee_0 - - tqdm=4.65.0=py311h92b7b1e_0 - - transformers=4.32.1=py311h06a4308_0 - - tzdata=2023c=h04d1e81_0 - - urllib3=1.26.18=py311h06a4308_0 - - utf8proc=2.6.1=h27cfd23_0 - - werkzeug=2.2.3=py311h06a4308_0 - - wheel=0.41.2=py311h06a4308_0 - - xxhash=0.8.0=h7f8727e_3 - - xz=5.4.2=h5eee18b_0 - - yaml=0.2.5=h7b6447c_0 - - yarl=1.8.1=py311h5eee18b_0 - - zipp=3.11.0=py311h06a4308_0 - - zlib=1.2.13=h5eee18b_0 - - zstd=1.5.5=hc292b87_0 - - pip: - - accelerate==0.24.0 - - annotated-types==0.6.0 - - anyio==3.7.1 - - backoff==2.2.1 - - bcrypt==4.0.1 - - cachetools==5.3.2 - - chroma-hnswlib==0.7.3 - - chromadb==0.4.15 - - coloredlogs==15.0.1 - - dataclasses-json==0.6.1 - - deprecated==1.2.14 - - fastapi==0.104.0 - - flask-cors==4.0.0 - - flatbuffers==23.5.26 - - google-auth==2.23.3 - - googleapis-common-protos==1.61.0 - - greenlet==3.0.1 - - grpcio==1.59.0 - - h11==0.14.0 - - httptools==0.6.1 - - humanfriendly==10.0 - - importlib-resources==6.1.0 - - joblib==1.3.2 - - jsonpatch==1.33 - - jsonpointer==2.4 - - kubernetes==28.1.0 - - langchain==0.0.325 - - langsmith==0.0.53 - - marshmallow==3.20.1 - - monotonic==1.6 - - mpmath==1.3.0 - - mypy-extensions==1.0.0 - - networkx==3.2.1 - - nltk==3.8.1 - - nvidia-cublas-cu12==12.1.3.1 - - nvidia-cuda-cupti-cu12==12.1.105 - - nvidia-cuda-nvrtc-cu12==12.1.105 - - nvidia-cuda-runtime-cu12==12.1.105 - - nvidia-cudnn-cu12==8.9.2.26 - - nvidia-cufft-cu12==11.0.2.54 - - nvidia-curand-cu12==10.3.2.106 - - nvidia-cusolver-cu12==11.4.5.107 - - nvidia-cusparse-cu12==12.1.0.106 - - nvidia-nccl-cu12==2.18.1 - - nvidia-nvjitlink-cu12==12.3.52 - - nvidia-nvtx-cu12==12.1.105 - - oauthlib==3.2.2 - - onnxruntime==1.16.1 - - opentelemetry-api==1.20.0 - - opentelemetry-exporter-otlp-proto-common==1.20.0 - - opentelemetry-exporter-otlp-proto-grpc==1.20.0 - - opentelemetry-proto==1.20.0 - - opentelemetry-sdk==1.20.0 - - opentelemetry-semantic-conventions==0.41b0 - - overrides==7.4.0 - - pillow==10.1.0 - - posthog==3.0.2 - - protobuf==4.24.4 - - psutil==5.9.6 - - pulsar-client==3.3.0 - - 
pyasn1==0.5.0 - - pyasn1-modules==0.3.0 - - pydantic==2.4.2 - - pydantic-core==2.10.1 - - pypika==0.48.9 - - python-dotenv==1.0.0 - - requests-oauthlib==1.3.1 - - rsa==4.9 - - scikit-learn==1.3.2 - - scipy==1.11.3 - - sentence-transformers==2.2.2 - - sentencepiece==0.1.99 - - sniffio==1.3.0 - - sqlalchemy==2.0.22 - - starlette==0.27.0 - - sympy==1.12 - - tenacity==8.2.3 - - threadpoolctl==3.2.0 - - torch==2.1.0 - - torchvision==0.16.0 - - triton==2.1.0 - - typer==0.9.0 - - typing-extensions==4.8.0 - - typing-inspect==0.9.0 - - uvicorn==0.23.2 - - uvloop==0.19.0 - - watchfiles==0.21.0 - - websocket-client==1.6.4 - - websockets==12.0 - - wrapt==1.15.0 -prefix: /home/rohan/miniconda3/envs/Megathon diff --git a/build_utils/requirements.txt b/build_utils/requirements.txt new file mode 100644 index 0000000..662d2cb --- /dev/null +++ b/build_utils/requirements.txt @@ -0,0 +1,8 @@ +transformers +langchain +Flask +flask-cors +chromadb +sentence-transformers +sentencepiece +torch --index-url https://download.pytorch.org/whl/cpu diff --git a/build_utils/run.sh b/build_utils/run.sh deleted file mode 100644 index b5036c0..0000000 --- a/build_utils/run.sh +++ /dev/null @@ -1,9 +0,0 @@ -if ! [ -f ./mashqa_data/med_db ] -then - cd mashqa_data - python format.py - cd ..; python dbGen.py -fi - -python backend.py -cd App; npx expo start diff --git a/build_utils/services.sh b/build_utils/services.sh new file mode 100644 index 0000000..87c41d6 --- /dev/null +++ b/build_utils/services.sh @@ -0,0 +1,12 @@ +#!/bin/bash + + + + +python3 backend.py & +cd App/ ; echo EXPO_PUBLIC_HOSTIP=$REACT_NATIVE_PACKAGER_HOSTNAME > .env +npx expo start & + +wait -n + +exit $? \ No newline at end of file diff --git a/install.sh b/install.sh new file mode 100644 index 0000000..ea9794a --- /dev/null +++ b/install.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +# updating repositories and installing curl +apt update +apt install curl -y + +# installing node and npm +curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.5/install.sh | bash +source ~/.bashrc +nvm install v18.13.0 + + +# installing all node modules required +cd App +npm ci +cd .. + + +# setting up python deps +apt update +apt install python3.10 python3-pip -y +pip3 install torch --index-url https://download.pytorch.org/whl/cpu +pip3 install transformers langchain Flask flask-cors chromadb sentence-transformers sentencepiece + + +#todo +# pull the med_db + diff --git a/medmini.py b/medmini.py index 83050d1..f25080b 100644 --- a/medmini.py +++ b/medmini.py @@ -1,4 +1,4 @@ -from transformers import pipeline,GPT2Model,GPT2Tokenizer,AutoModelForCausalLM, AutoTokenizer,BitsAndBytesConfig +from transformers import pipeline import torch from langchain.vectorstores import Chroma from langchain.embeddings import HuggingFaceEmbeddings @@ -14,13 +14,9 @@ def formatPrompt(prompt,context): -def infer(prompt): - - # prompt = f'What are some ways to deal with bipolar disorder?' 
- - - ### RAG +def systemInit(): + # initializing the vector db model_name = "sentence-transformers/all-mpnet-base-v2" model_kwargs = {'device': 'cpu'} encode_kwargs = {'normalize_embeddings': False} @@ -29,12 +25,23 @@ def infer(prompt): model_kwargs=model_kwargs, encode_kwargs=encode_kwargs ) + vectordb=Chroma(persist_directory='./med_db',embedding_function=hf) + # initializing the model + summarizer = pipeline("summarization", model="Falconsai/medical_summarization") + return summarizer,vectordb - vectordb=Chroma(persist_directory='./med_db',embedding_function=hf) + + +def infer(prompt,summarizer,vectordb): + + # prompt = f'What are some ways to deal with bipolar disorder?' + + + ### RAG # print(vectordb._collection.count()) - docs = vectordb.similarity_search(prompt,k=6) + docs = vectordb.similarity_search(prompt,k=4) # print(len(docs)) # print(docs) context=' '.join(d.page_content for d in docs) @@ -45,101 +52,98 @@ def useModel(model_name): if model_name=='summarizer': #### Summarization - # from transformers import pipeline - - summarizer = pipeline("summarization", model="Falconsai/medical_summarization") - output = summarizer(context, max_length=512, min_length=32, do_sample=False) + output = summarizer(formatPrompt(prompt,context), max_length=512, min_length=32, do_sample=False) - elif model_name=='gpt2': + # elif model_name=='gpt2': - #### Base GPT2 unquantized - low_cpu=True + # #### Base GPT2 unquantized + # low_cpu=True - # args={"low_cpu_mem_usage":low_cpu,"device":'cpu',"load_in_8bit":quantized,"load_in_4bit":extreme_quantization,"torch_dtype":torch.float32} - args={"low_cpu_mem_usage":low_cpu,"device_map":'cpu',"torch_dtype":torch.float32} - pipe = pipeline('text-generation', model='gpt2',model_kwargs=args) + # # args={"low_cpu_mem_usage":low_cpu,"device":'cpu',"load_in_8bit":quantized,"load_in_4bit":extreme_quantization,"torch_dtype":torch.float32} + # args={"low_cpu_mem_usage":low_cpu,"device_map":'cpu',"torch_dtype":torch.float32} + # pipe = pipeline('text-generation', model='gpt2',model_kwargs=args) - output=pipe(formatPrompt(prompt, context), max_new_tokens=60) + # output=pipe(formatPrompt(prompt, context), max_new_tokens=60) - elif model_name=='gpt2_quantized': + # elif model_name=='gpt2_quantized': - #### Base GPT2 quantized - tokenizer = AutoTokenizer.from_pretrained("gpt2", trust_remote_code=True) - tokenizer.pad_token = tokenizer.eos_token + # #### Base GPT2 quantized + # tokenizer = AutoTokenizer.from_pretrained("gpt2", trust_remote_code=True) + # tokenizer.pad_token = tokenizer.eos_token - bnb_config = BitsAndBytesConfig( - load_in_4bit=True, - bnb_4bit_use_double_quant=True, - bnb_4bit_quant_type="nf4", - bnb_4bit_compute_dtype=torch.float16 - ) + # bnb_config = BitsAndBytesConfig( + # load_in_4bit=True, + # bnb_4bit_use_double_quant=True, + # bnb_4bit_quant_type="nf4", + # bnb_4bit_compute_dtype=torch.float16 + # ) - model = AutoModelForCausalLM.from_pretrained( - "gpt2", - quantization_config=bnb_config, - trust_remote_code=True - ) - model.config.use_cache = False + # model = AutoModelForCausalLM.from_pretrained( + # "gpt2", + # quantization_config=bnb_config, + # trust_remote_code=True + # ) + # model.config.use_cache = False - pipe=pipeline('text-generation',model=model,tokenizer=tokenizer) - output=pipe(formatPrompt(prompt,context)) + # pipe=pipeline('text-generation',model=model,tokenizer=tokenizer) + # output=pipe(formatPrompt(prompt,context)) - elif model_name=='phi1.5_quantized': + # elif model_name=='phi1.5_quantized': - #### Phi 1.5 
quantized/unquantized - tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-1_5", trust_remote_code=True) - # tokenizer = AutoTokenizer.from_pretrained("google/flan-t5-small", trust_remote_code=True) - tokenizer.pad_token = tokenizer.eos_token + # #### Phi 1.5 quantized/unquantized + # tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-1_5", trust_remote_code=True) + # # tokenizer = AutoTokenizer.from_pretrained("google/flan-t5-small", trust_remote_code=True) + # tokenizer.pad_token = tokenizer.eos_token - bnb_config = BitsAndBytesConfig( - load_in_4bit=True, - bnb_4bit_use_double_quant=True, - bnb_4bit_quant_type="nf4", - bnb_4bit_compute_dtype=torch.float16 - ) + # bnb_config = BitsAndBytesConfig( + # load_in_4bit=True, + # bnb_4bit_use_double_quant=True, + # bnb_4bit_quant_type="nf4", + # bnb_4bit_compute_dtype=torch.float16 + # ) - model = AutoModelForCausalLM.from_pretrained( - "microsoft/phi-1_5", - # "google/flan-t5-small", - # quantization_config=bnb_config, - trust_remote_code=True - ) + # model = AutoModelForCausalLM.from_pretrained( + # "microsoft/phi-1_5", + # # "google/flan-t5-small", + # # quantization_config=bnb_config, + # trust_remote_code=True + # ) - pipe=pipeline('text-generation',model=model,tokenizer=tokenizer) - output=pipe(formatPrompt(prompt,context), max_new_tokens=60) + # pipe=pipeline('text-generation',model=model,tokenizer=tokenizer) + # output=pipe(formatPrompt(prompt,context), max_new_tokens=60) - elif model_name=='phi1.5': + # elif model_name=='phi1.5': - ### Phi 1.5 official - torch.set_default_device("cpu") - model = AutoModelForCausalLM.from_pretrained("microsoft/phi-1_5", trust_remote_code=True,device_map='cuda') - tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-1_5", trust_remote_code=True) - inputs = tokenizer(formatPrompt(prompt,context), return_tensors="pt", return_attention_mask=False) + # ### Phi 1.5 official + # torch.set_default_device("cpu") + # model = AutoModelForCausalLM.from_pretrained("microsoft/phi-1_5", trust_remote_code=True,device_map='cuda') + # tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-1_5", trust_remote_code=True) + # inputs = tokenizer(formatPrompt(prompt,context), return_tensors="pt", return_attention_mask=False) - outputs = model.generate(**inputs) - output = tokenizer.batch_decode(outputs)[0] + # outputs = model.generate(**inputs) + # output = tokenizer.batch_decode(outputs)[0] - elif model_name=='flant5': - from transformers import T5Tokenizer, T5ForConditionalGeneration + # elif model_name=='flant5': + # from transformers import T5Tokenizer, T5ForConditionalGeneration - tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-large") - model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-large") + # tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-large") + # model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-large") - input_text = formatPrompt(prompt, context) - input_ids = tokenizer(input_text, return_tensors="pt").input_ids + # input_text = formatPrompt(prompt, context) + # input_ids = tokenizer(input_text, return_tensors="pt").input_ids - outputs= model.generate(input_ids, max_new_tokens=400) - output = tokenizer.decode(outputs[0]) + # outputs= model.generate(input_ids, max_new_tokens=400) + # output = tokenizer.decode(outputs[0]) - elif model_name=='gpt2_finetuned': + # elif model_name=='gpt2_finetuned': - #### Finetuned GPT2 model - model = AutoModelForCausalLM.from_pretrained("SidhiPanda/gpt2-finetuned-megathon", trust_remote_code=True, 
torch_dtype=torch.float32)
-            tokenizer = AutoTokenizer.from_pretrained("gpt2", trust_remote_code=True)
-            inputs = tokenizer(formatPrompt(prompt,context), return_tensors="pt", return_attention_mask=False)
+            # #### Finetuned GPT2 model
+            # model = AutoModelForCausalLM.from_pretrained("SidhiPanda/gpt2-finetuned-megathon", trust_remote_code=True, torch_dtype=torch.float32)
+            # tokenizer = AutoTokenizer.from_pretrained("gpt2", trust_remote_code=True)
+            # inputs = tokenizer(formatPrompt(prompt,context), return_tensors="pt", return_attention_mask=False)

-            outputs = model.generate(**inputs, max_new_tokens=45)
-            output = tokenizer.batch_decode(outputs)[0]
+            # outputs = model.generate(**inputs, max_new_tokens=45)
+            # output = tokenizer.batch_decode(outputs)[0]

@@ -148,3 +152,5 @@ def useModel(model_name):
         return output[0]['summary_text']

     return useModel('summarizer')
+
+
diff --git a/run.sh b/run.sh
new file mode 100644
index 0000000..589cc95
--- /dev/null
+++ b/run.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+# subshell for starting frontend and backend
\ No newline at end of file
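
Note: run.sh is added in this patch only as a stub whose comment states its intent (a subshell that starts the frontend and backend). A minimal sketch of what that wrapper could look like, assuming it simply mirrors build_utils/services.sh from this same patch; the layout (backend.py in the repo root, the Expo app in App/) and the EXPO_PUBLIC_HOSTIP/.env handling are taken from that script, not defined by run.sh itself:

    #!/bin/bash
    # Sketch only: assumes the same layout build_utils/services.sh relies on
    # (backend.py in the repo root, the Expo app in App/).
    (
        # start the Flask backend in the background
        python3 backend.py &

        # tell the app which host to call, then start the Expo dev server
        cd App/
        echo EXPO_PUBLIC_HOSTIP=$REACT_NATIVE_PACKAGER_HOSTNAME > .env
        npx expo start &

        # block until either service exits, returning its status
        wait -n
    )
    exit $?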