Browse Source

新增多ip多篇并发flask

master
majiahui@haimaqingfan.com 2 years ago
parent
commit
9b75f389e9
  1. 2
      .idea/ChatGPT_Sever.iml
  2. 6
      .idea/deployment.xml
  3. 2
      .idea/misc.xml
  4. 2
      .idea/sshConfigs.xml
  5. 4
      .idea/webServers.xml
  6. 376
      .idea/workspace.xml
  7. 30
      chatGPT账号 26.txt
  8. 56
      chatgpt_post.py
  9. 714
      flask_serve_2.py
  10. 51
      flask_serves.py
  11. 725
      flask_sever_1.py
  12. 11
      json_模板.json
  13. 203
      lock_flask.py
  14. 45
      serve_config_1.py
  15. 43
      serve_config_2.py
  16. 20
      查询uuid.py
  17. 22
      测试chatgpt调用接口.py
  18. 13
      测试flask多进程.py
  19. 3
      测试多进程.py
  20. 2
      测试生成uuid.py
  21. 2
      简单的flask.py

2
.idea/ChatGPT_Sever.iml

@ -5,7 +5,7 @@
</component>
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="jdk" jdkName="Remote Python 3.9.16 (sftp://majiahui@104.244.90.248:28385/home/majiahui/miniconda3/envs/chatgpt/bin/python)" jdkType="Python SDK" />
<orderEntry type="jdk" jdkName="Remote Python 3.9.16 (sftp://majiahui@104.244.89.190:27998/home/majiahui/miniconda3/envs/chatgpt/bin/python3)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="TemplatesService">

6
.idea/deployment.xml

@ -1,15 +1,15 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="PublishConfigData" autoUpload="Always" serverName="majiahui@104.244.90.248:28385" remoteFilesAllowedToDisappearOnAutoupload="false">
<component name="PublishConfigData" autoUpload="Always" serverName="majiahui@104.244.89.190:27998" remoteFilesAllowedToDisappearOnAutoupload="false">
<serverData>
<paths name="chat">
<serverdata>
<mappings>
<mapping local="$PROJECT_DIR$" web="/" />
<mapping deploy="/" local="$PROJECT_DIR$" web="/" />
</mappings>
</serverdata>
</paths>
<paths name="majiahui@104.244.90.248:28385">
<paths name="majiahui@104.244.89.190:27998">
<serverdata>
<mappings>
<mapping deploy="/home/majiahui/ChatGPT_Sever" local="$PROJECT_DIR$" />

2
.idea/misc.xml

@ -1,4 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Remote Python 3.9.16 (sftp://majiahui@104.244.90.248:28385/home/majiahui/miniconda3/envs/chatgpt/bin/python)" project-jdk-type="Python SDK" />
<component name="ProjectRootManager" version="2" project-jdk-name="Remote Python 3.9.16 (sftp://majiahui@104.244.89.190:27998/home/majiahui/miniconda3/envs/chatgpt/bin/python3)" project-jdk-type="Python SDK" />
</project>

2
.idea/sshConfigs.xml

@ -2,7 +2,7 @@
<project version="4">
<component name="SshConfigs">
<configs>
<sshConfig authType="PASSWORD" host="104.244.90.248" id="88e52b8d-a70e-4743-9a44-06aaefa0048a" port="28385" nameFormat="DESCRIPTIVE" username="majiahui" useOpenSSHConfig="true" />
<sshConfig authType="PASSWORD" host="104.244.89.190" id="88e52b8d-a70e-4743-9a44-06aaefa0048a" port="27998" nameFormat="DESCRIPTIVE" username="majiahui" useOpenSSHConfig="true" />
</configs>
</component>
</project>

4
.idea/webServers.xml

@ -2,8 +2,8 @@
<project version="4">
<component name="WebServers">
<option name="servers">
<webServer id="c2e40908-9bb6-423b-a163-c99a15eb604c" name="chat">
<fileTransfer rootFolder="/home/majiahui/ChatGPT_Sever" accessType="SFTP" host="104.244.90.248" port="28385" sshConfigId="88e52b8d-a70e-4743-9a44-06aaefa0048a" sshConfig="majiahui@104.244.90.248:28385 password">
<webServer id="f58df6e3-b80d-420d-b140-8a999839c6d7" name="chat">
<fileTransfer rootFolder="/home/majiahui/ChatGPT_Sever" accessType="SFTP" host="104.244.89.190" port="27998" sshConfigId="88e52b8d-a70e-4743-9a44-06aaefa0048a" sshConfig="majiahui@104.244.89.190:27998 password">
<advancedOptions>
<advancedOptions dataProtectionLevel="Private" keepAliveTimeout="0" passiveMode="true" shareSSLContext="true" />
</advancedOptions>

376
.idea/workspace.xml

@ -1,7 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ChangeListManager">
<list default="true" id="5dac27f6-2de9-4ccf-bf99-64c76347d9c5" name="Changes" comment="" />
<list default="true" id="5dac27f6-2de9-4ccf-bf99-64c76347d9c5" name="Changes" comment="新增多api多篇并发flask">
<change afterPath="$PROJECT_DIR$/flask_serve_2.py" afterDir="false" />
<change afterPath="$PROJECT_DIR$/json_模板.json" afterDir="false" />
<change afterPath="$PROJECT_DIR$/serve_config_1.py" afterDir="false" />
<change afterPath="$PROJECT_DIR$/serve_config_2.py" afterDir="false" />
<change afterPath="$PROJECT_DIR$/测试flask多进程.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/.idea/ChatGPT_Sever.iml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/ChatGPT_Sever.iml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/.idea/deployment.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/deployment.xml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/.idea/misc.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/misc.xml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/.idea/sshConfigs.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/sshConfigs.xml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/.idea/webServers.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/webServers.xml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/flask_serves.py" beforeDir="false" afterPath="$PROJECT_DIR$/flask_serves.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/flask_sever_1.py" beforeDir="false" afterPath="$PROJECT_DIR$/flask_sever_1.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/lock_flask.py" beforeDir="false" afterPath="$PROJECT_DIR$/lock_flask.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/查询uuid.py" beforeDir="false" afterPath="$PROJECT_DIR$/查询uuid.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/测试chatgpt调用接口.py" beforeDir="false" afterPath="$PROJECT_DIR$/测试chatgpt调用接口.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/测试多进程.py" beforeDir="false" afterPath="$PROJECT_DIR$/测试多进程.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/测试生成uuid.py" beforeDir="false" afterPath="$PROJECT_DIR$/测试生成uuid.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/简单的flask.py" beforeDir="false" afterPath="$PROJECT_DIR$/简单的flask.py" afterDir="false" />
</list>
<option name="SHOW_DIALOG" value="false" />
<option name="HIGHLIGHT_CONFLICTS" value="true" />
<option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
@ -26,6 +46,11 @@
</option>
</component>
<component name="Git.Settings">
<option name="RECENT_BRANCH_BY_REPOSITORY">
<map>
<entry key="$PROJECT_DIR$" value="dev_1_" />
</map>
</option>
<option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
</component>
<component name="MarkdownSettingsMigration">
@ -36,16 +61,16 @@
<option name="hideEmptyMiddlePackages" value="true" />
<option name="showLibraryContents" value="true" />
</component>
<component name="PropertiesComponent">{
&quot;keyToString&quot;: {
&quot;RunOnceActivity.OpenProjectViewOnStart&quot;: &quot;true&quot;,
&quot;RunOnceActivity.ShowReadmeOnStart&quot;: &quot;true&quot;,
&quot;WebServerToolWindowFactoryState&quot;: &quot;false&quot;,
&quot;settings.editor.selected.configurable&quot;: &quot;editor.preferences.fonts.default&quot;
<component name="PropertiesComponent"><![CDATA[{
"keyToString": {
"RunOnceActivity.OpenProjectViewOnStart": "true",
"RunOnceActivity.ShowReadmeOnStart": "true",
"WebServerToolWindowFactoryState": "false",
"settings.editor.selected.configurable": "com.jetbrains.python.configuration.PyActiveSdkModuleConfigurable"
}
}</component>
<component name="RunManager" selected="Python.ceshisuo">
<configuration name="ChatGPT_Sever" type="PythonConfigurationType" factoryName="Python">
}]]></component>
<component name="RunManager" selected="Python.测试生成uuid">
<configuration name="简单的" type="PythonConfigurationType" factoryName="Python" nameIsGenerated="true">
<module name="ChatGPT_Sever" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
@ -58,7 +83,7 @@
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/ceshi.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/简单的" />
<option name="PARAMETERS" value="" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
@ -67,7 +92,7 @@
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
<configuration name="GPT_service" type="PythonConfigurationType" factoryName="Python" nameIsGenerated="true">
<configuration name="ChatGPT_Sever" type="PythonConfigurationType" factoryName="Python">
<module name="ChatGPT_Sever" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
@ -80,7 +105,7 @@
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/GPT_service.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/ceshi.py" />
<option name="PARAMETERS" value="" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
@ -89,20 +114,20 @@
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
<configuration name="ceshiduojincheng" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
<configuration name="GPT_service" type="PythonConfigurationType" factoryName="Python" nameIsGenerated="true">
<module name="ChatGPT_Sever" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<envs>
<env name="PYTHONUNBUFFERED" value="1" />
</envs>
<option name="SDK_HOME" value="" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
<option name="IS_MODULE_SDK" value="true" />
<option name="SDK_HOME" value="sftp://majiahui@104.244.90.248:28385/home/majiahui/miniconda3/envs/chatgpt/bin/python" />
<option name="WORKING_DIRECTORY" value="" />
<option name="IS_MODULE_SDK" value="false" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/ceshiduojincheng.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/GPT_service.py" />
<option name="PARAMETERS" value="" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
@ -133,7 +158,7 @@
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
<configuration name="ceshisuo" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
<configuration name="测试chatgpt调用接口" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
<module name="ChatGPT_Sever" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
@ -146,7 +171,7 @@
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/ceshisuo.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/测试chatgpt调用接口.py" />
<option name="PARAMETERS" value="" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
@ -155,7 +180,7 @@
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
<configuration name="cewshi" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
<configuration name="chatgpt_post" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
<module name="ChatGPT_Sever" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
@ -168,7 +193,29 @@
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/cewshi.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/chatgpt_post.py" />
<option name="PARAMETERS" value="" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
<option name="MODULE_MODE" value="false" />
<option name="REDIRECT_INPUT" value="false" />
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
<configuration name="简单的flask" type="PythonConfigurationType" factoryName="Python" nameIsGenerated="true">
<module name="ChatGPT_Sever" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<envs>
<env name="PYTHONUNBUFFERED" value="1" />
</envs>
<option name="SDK_HOME" value="sftp://majiahui@104.244.90.248:28385/home/majiahui/miniconda3/envs/chatgpt/bin/python" />
<option name="WORKING_DIRECTORY" value="" />
<option name="IS_MODULE_SDK" value="false" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/简单的flask.py" />
<option name="PARAMETERS" value="" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
@ -199,6 +246,28 @@
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
<configuration name="flask_sever_1" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
<module name="ChatGPT_Sever" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<envs>
<env name="PYTHONUNBUFFERED" value="1" />
</envs>
<option name="SDK_HOME" value="" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
<option name="IS_MODULE_SDK" value="true" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/flask_sever_1.py" />
<option name="PARAMETERS" value="" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
<option name="MODULE_MODE" value="false" />
<option name="REDIRECT_INPUT" value="false" />
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
<configuration name="lock_flask" type="PythonConfigurationType" factoryName="Python" nameIsGenerated="true">
<module name="ChatGPT_Sever" />
<option name="INTERPRETER_OPTIONS" value="" />
@ -397,23 +466,87 @@
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
<configuration name="查询uuid" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
<module name="ChatGPT_Sever" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<envs>
<env name="PYTHONUNBUFFERED" value="1" />
</envs>
<option name="SDK_HOME" value="" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
<option name="IS_MODULE_SDK" value="true" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/查询uuid.py" />
<option name="PARAMETERS" value="" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
<option name="MODULE_MODE" value="false" />
<option name="REDIRECT_INPUT" value="false" />
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
<configuration name="查询uuid" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
<module name="ChatGPT_Sever" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<envs>
<env name="PYTHONUNBUFFERED" value="1" />
</envs>
<option name="SDK_HOME" value="" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
<option name="IS_MODULE_SDK" value="true" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/查询uuid.py" />
<option name="PARAMETERS" value="" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
<option name="MODULE_MODE" value="false" />
<option name="REDIRECT_INPUT" value="false" />
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
<configuration name="Flask (简单的flask.py)" type="Python.FlaskServer" nameIsGenerated="true">
<module name="ChatGPT_Sever" />
<option name="target" value="$PROJECT_DIR$/简单的flask.py" />
<option name="targetType" value="PATH" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<option name="SDK_HOME" value="" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
<option name="IS_MODULE_SDK" value="true" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="launchJavascriptDebuger" value="false" />
<method v="2" />
</configuration>
<list>
<item itemvalue="Flask server.Flask (简单的flask.py)" />
<item itemvalue="Python.简单的" />
<item itemvalue="Python.简单的flask" />
<item itemvalue="Python.lock_flask" />
<item itemvalue="Python.ceshiflask" />
<item itemvalue="Python.GPT_service" />
<item itemvalue="Python.ChatGPT_Sever" />
<item itemvalue="Python.flask_serves" />
<item itemvalue="Python.ceshiduojincheng" />
<item itemvalue="Python.cewshi" />
<item itemvalue="Python.ceshisuo" />
<item itemvalue="Python.chatgpt_post" />
<item itemvalue="Python.flask_sever_1" />
<item itemvalue="Python.查询uuid" />
<item itemvalue="Python.测试chatgpt调用接口" />
<item itemvalue="Python.测试生成uuid" />
</list>
<recent_temporary>
<list>
<item itemvalue="Python.ceshisuo" />
<item itemvalue="Python.cewshi" />
<item itemvalue="Python.测试生成uuid" />
<item itemvalue="Python.ceshiduojincheng" />
<item itemvalue="Python.flask_sever_1" />
<item itemvalue="Python.查询uuid" />
<item itemvalue="Python.chatgpt_post" />
<item itemvalue="Python.测试chatgpt调用接口" />
</list>
</recent_temporary>
</component>
@ -438,148 +571,141 @@
<workItem from="1680409042412" duration="4429000" />
<workItem from="1680421437562" duration="496000" />
<workItem from="1680438141204" duration="21342000" />
<workItem from="1680510884151" duration="586000" />
<workItem from="1680510884151" duration="938000" />
<workItem from="1680511931707" duration="3409000" />
<workItem from="1680515486728" duration="29349000" />
<workItem from="1680664432032" duration="82861000" />
</task>
<task id="LOCAL-00001" summary="题目生成论文,第一个版本">
<created>1680511772109</created>
<option name="number" value="00001" />
<option name="presentableId" value="LOCAL-00001" />
<option name="project" value="LOCAL" />
<updated>1680511772109</updated>
</task>
<task id="LOCAL-00002" summary="新增多api多篇并发">
<created>1680771803627</created>
<option name="number" value="00002" />
<option name="presentableId" value="LOCAL-00002" />
<option name="project" value="LOCAL" />
<updated>1680771803627</updated>
</task>
<task id="LOCAL-00003" summary="新增多api多篇并发flask">
<created>1680835614890</created>
<option name="number" value="00003" />
<option name="presentableId" value="LOCAL-00003" />
<option name="project" value="LOCAL" />
<updated>1680835614890</updated>
</task>
<option name="localTasksCounter" value="4" />
<servers />
</component>
<component name="TypeScriptGeneratedFilesManager">
<option name="version" value="3" />
</component>
<component name="Vcs.Log.Tabs.Properties">
<option name="TAB_STATES">
<map>
<entry key="MAIN">
<value>
<State />
</value>
</entry>
</map>
</option>
</component>
<component name="VcsManagerConfiguration">
<MESSAGE value="题目生成论文,第一个版本" />
<MESSAGE value="新增多api多篇并发" />
<MESSAGE value="新增多api多篇并发flask" />
<option name="LAST_COMMIT_MESSAGE" value="新增多api多篇并发flask" />
</component>
<component name="XDebuggerManager">
<breakpoint-manager>
<breakpoints>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>301</line>
<option name="timeStamp" value="2" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>79</line>
<option name="timeStamp" value="5" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>114</line>
<option name="timeStamp" value="6" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>204</line>
<option name="timeStamp" value="8" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>206</line>
<option name="timeStamp" value="9" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>122</line>
<option name="timeStamp" value="10" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>208</line>
<option name="timeStamp" value="12" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>209</line>
<option name="timeStamp" value="13" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>197</line>
<option name="timeStamp" value="14" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>112</line>
<option name="timeStamp" value="16" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>111</line>
<option name="timeStamp" value="17" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>199</line>
<option name="timeStamp" value="19" />
<line>96</line>
<option name="timeStamp" value="41" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>318</line>
<option name="timeStamp" value="20" />
<url>file://$PROJECT_DIR$/flask_sever_1.py</url>
<line>531</line>
<option name="timeStamp" value="74" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>317</line>
<option name="timeStamp" value="21" />
<url>file://$PROJECT_DIR$/flask_sever_1.py</url>
<line>522</line>
<option name="timeStamp" value="75" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>316</line>
<option name="timeStamp" value="22" />
<url>file://$PROJECT_DIR$/flask_sever_1.py</url>
<line>518</line>
<option name="timeStamp" value="76" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>312</line>
<option name="timeStamp" value="23" />
<url>file://$PROJECT_DIR$/flask_sever_1.py</url>
<line>536</line>
<option name="timeStamp" value="77" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>320</line>
<option name="timeStamp" value="24" />
<url>file://$PROJECT_DIR$/flask_sever_1.py</url>
<line>537</line>
<option name="timeStamp" value="78" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>321</line>
<option name="timeStamp" value="25" />
<url>file://$PROJECT_DIR$/flask_sever_1.py</url>
<line>100</line>
<option name="timeStamp" value="79" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>322</line>
<option name="timeStamp" value="26" />
<url>file://$PROJECT_DIR$/flask_sever_1.py</url>
<line>85</line>
<option name="timeStamp" value="80" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>323</line>
<option name="timeStamp" value="27" />
<url>file://$PROJECT_DIR$/flask_sever_1.py</url>
<line>115</line>
<option name="timeStamp" value="81" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>324</line>
<option name="timeStamp" value="28" />
<url>file://$PROJECT_DIR$/flask_sever_1.py</url>
<line>188</line>
<option name="timeStamp" value="82" />
</line-breakpoint>
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
<url>file://$PROJECT_DIR$/flask_serves.py</url>
<line>143</line>
<option name="timeStamp" value="36" />
<url>file://$PROJECT_DIR$/flask_sever_1.py</url>
<line>239</line>
<option name="timeStamp" value="83" />
</line-breakpoint>
</breakpoints>
</breakpoint-manager>
</component>
<component name="com.intellij.coverage.CoverageDataManagerImpl">
<SUITE FILE_PATH="coverage/ChatGPT_Sever$.coverage" NAME="下载文件 Coverage Results" MODIFIED="1680334642118" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$uuid.coverage" NAME="查询uuid Coverage Results" MODIFIED="1680506852264" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$.coverage" NAME="移除元素 Coverage Results" MODIFIED="1680766939365" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$ceshi.coverage" NAME="ceshi正则 Coverage Results" MODIFIED="1680245547162" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$lock_flask.coverage" NAME="lock_flask Coverage Results" MODIFIED="1680504278301" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$Flask__flask_serves_py_.coverage" NAME="Flask (flask_serves.py) Coverage Results" MODIFIED="1680224653637" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$diaoyong_gpt_3.coverage" NAME="diaoyong_gpt-3 Coverage Results" MODIFIED="1680413670454" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$Flask__GPT_service_py_.coverage" NAME="Flask (GPT_service.py) Coverage Results" MODIFIED="1680191382503" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$GPT_service.coverage" NAME="GPT_service Coverage Results" MODIFIED="1680439068329" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$jar_bagk.coverage" NAME="jar_bagk Coverage Results" MODIFIED="1680256965020" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$Flask__ceshiflask_py_.coverage" NAME="Flask (ceshiflask.py) Coverage Results" MODIFIED="1680334430007" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$flask_serves.coverage" NAME="flask_serves Coverage Results" MODIFIED="1680492218263" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$ceshishengcehngpath.coverage" NAME="ceshishengcehngpath Coverage Results" MODIFIED="1680236688849" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$flask_serves.coverage" NAME="flask_serves Coverage Results" MODIFIED="1680600126921" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$cewshi.coverage" NAME="cewshi Coverage Results" MODIFIED="1680509846740" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$Flask__lock_flask_py_.coverage" NAME="Flask (lock_flask.py) Coverage Results" MODIFIED="1680494176187" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$ceshi_chat.coverage" NAME="ceshi_chat Coverage Results" MODIFIED="1680439193947" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$ceshisuo.coverage" NAME="ceshisuo Coverage Results" MODIFIED="1680511457852" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$chatgpt.coverage" NAME="测试chatgpt调用接口 Coverage Results" MODIFIED="1680854857821" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$uuid.coverage" NAME="测试生成uuid Coverage Results" MODIFIED="1680860391233" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$Flask__flask_serves_py_.coverage" NAME="Flask (flask_serves.py) Coverage Results" MODIFIED="1680224653637" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$diaoyong_gpt_3.coverage" NAME="diaoyong_gpt-3 Coverage Results" MODIFIED="1680413670454" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$chatgpt_post.coverage" NAME="chatgpt_post Coverage Results" MODIFIED="1680855972364" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$Flask__ceshiflask_py_.coverage" NAME="Flask (ceshiflask.py) Coverage Results" MODIFIED="1680334430007" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$ceshishengcehngpath.coverage" NAME="ceshishengcehngpath Coverage Results" MODIFIED="1680236688849" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$flask_serce_dev_1.coverage" NAME="flask_serce_dev_1 Coverage Results" MODIFIED="1680768468214" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$ceshiflask.coverage" NAME="ceshiflask Coverage Results" MODIFIED="1680336319266" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$ceshibianma.coverage" NAME="ceshibianma Coverage Results" MODIFIED="1680255589158" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$ceshi_chat.coverage" NAME="ceshi_chat Coverage Results" MODIFIED="1680439193947" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$ip.coverage" NAME="获取本机ip Coverage Results" MODIFIED="1680683503501" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$flask_sever_1.coverage" NAME="flask_sever_1 Coverage Results" MODIFIED="1680860358565" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$ceshiduojincheng.coverage" NAME="ceshiduojincheng Coverage Results" MODIFIED="1680502039974" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$ceshisuo.coverage" NAME="ceshisuo Coverage Results" MODIFIED="1680511457852" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/ChatGPT_Sever$json.coverage" NAME="打开json Coverage Results" MODIFIED="1680765942021" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
</component>
</project>

30
chatGPT账号 26.txt

@ -0,0 +1,30 @@
主号
jhma1234333@gmail.com shabi123* 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 付款账户 :{plus:5319 9345 1099 3434, api:
1987890321@qq.com zjz1987890321++ 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 付款账户 :{plus:5319 9345 1099 3434 ,api:5319 9345 1099 3434
1432334894@qq.com H1234567 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 付款账户 :
33367671@qq.com ly33367671. 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 付款账户 :
gradymendoza342@gmail.com----qx6kqppvupyg566h----a0fndmcme8y7x3z3@aol.com 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
romypynysy76433@gmail.com----x87zd8qdq9gg27kw----h4oizvpsykhz0kfn@gmx.com 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
jesikajoya819@gmail.com----cdekq9gq9q9btz7h----vasf5ofm6txmojis@proton.me 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
ounrzagrdezi@gmail.com----tkfevgjsbpcc54w0----qflrsz92ebbkot97@gmx.com api-keys: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
aiyaaiaj27262@gmail.com----m4h48gamzqp48us9----aibmmesoooduwjmo@mail.com api-keys: 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
homeworkshxjdhd@gmail.com----fjyysmunpoce16w6----yq29o1u911v0vw5n@outlook.com api-keys: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
hansikamutvani@gmail.com----g794fak5wy9mobqp----vu3gu1ou0nz48u0a@hotmail.com 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
qatalpend356@gmail.com----xo894ftgachr1huq----pg7gnc4a7l1wly1y@proton.me api-keys: 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
akhiakther8373@gmail.com----dt6g2tp0qkp8t24f----ys1w8k6j0wo6l144@aol.com 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
qatalpend356@gmail.com----xo894ftgachr1huq----pg7gnc4a7l1wly1y@proton.me api-keys: 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
ramtonazkine@gmail.com----qquh8uaabgdznrtg----exsypsq2buqxwbfa@gmx.com api-keys: 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
davislaird085@gmail.com----cw1fhcde8syffqmy----trre1iri5mhd2v8w@outlook.com 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
ruiduphfo35644@gmail.com----hbu6458aa44f9s8h----ns237070stg1v5kn@163.com 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
najminmohonas3@gmail.com----mctpqhe06opspbnt----i9m52jmf3i4ngs6a@mail.com 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
eobdkxpwpekd@gmail.com----cpqqxxqk0wf34cba----j59ix0ibxwmlzohf@mail.com api-keys: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
subornasalinia@gmail.com----tn4tved9akum1kx1----ztxqc4kbttzp75om@outlook.com api-keys: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
peswjfgsdsv122@gmail.com----cwsgftzg85h0npx6----baua3t3317j007tx@gmx.com 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
yyzhao@entroduction.cn she220877 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
JacobRios531@outlook.com----bkwga59Kw7----org-IkKh08PJohhA8ISPAu3F1CoH----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
副号
JacobRios531@outlook.com----bkwga59Kw7----org-IkKh08PJohhA8ISPAu3F1CoH----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
joakrdallizx@gmail.com----b9mbyj3yv5q71fkn----zagztz5lnnwuq9hz@hotmail.com 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

56
chatgpt_post.py

@ -0,0 +1,56 @@
import requests
import json
OPENAI_API_KEY = "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"
url = "https://api.openai.com/v1/chat/completions"
headers = {
"Content-Type": "application/json",
"Authorization": f"Bearer {OPENAI_API_KEY}"
}
data = {
"model": "gpt-3.5-turbo",
"messages": [
{"role": "user", "content": "请帮我改写这句话:在城市发展进程当中,逐渐呈现出一些综合性的大型建筑群。"},
{"role": "assistant", "content": "随着城市的发展,综合性大型建筑群正在逐渐出现。"},
{"role": "user", "content": "这句话我不满意,再改一下帮我"}
],
"temperature": 0.7
}
response = requests.post(url,
headers=headers,
data=json.dumps(data),
timeout=1000)
res = response.json()
print(res)
print(res["choices"][0]["message"]["content"])
OPENAI_API_KEY = api_key
url = "https://api.openai.com/v1/chat/completions"
headers = {
"Content-Type": "application/json",
"Authorization": f"Bearer {OPENAI_API_KEY}"
}
data = {
"model": "gpt-3.5-turbo",
"messages": [
{"role": "user", "content": "请帮我改写这句话:在城市发展进程当中,逐渐呈现出一些综合性的大型建筑群。"},
{"role": "assistant", "content": "随着城市的发展,综合性大型建筑群正在逐渐出现。"},
{"role": "user", "content": "这句话我不满意,再改一下帮我"}
],
"temperature": 0.7
}
response = requests.post(url,
headers=headers,
data=json.dumps(data),
timeout=1000)
redis_.rpush(redis_key_name_openaikey_list, api_key)

714
flask_serve_2.py

@ -0,0 +1,714 @@
from flask import Flask, jsonify, Response
from flask import request
import redis
import uuid
import json
import time
import threading
from threading import Thread
from flask import send_file, send_from_directory
import os
from flask import make_response
import openai
import base64
import re
import urllib.parse as pa
import socket
from serve_config_1 import Config
import requests
# --- Global service state -------------------------------------------------
# Flask app, Redis connection, and the names of the Redis keys/queues that
# coordinate the multi-IP, multi-paper generation pipeline.
config = Config()
app = Flask(__name__)
app.config["JSON_AS_ASCII"] = False
# Discover this host's outbound IP by "connecting" a UDP socket to a public
# address (no packet is actually sent for a UDP connect).
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
localhostip = s.getsockname()[0]
# Guards read-modify-write cycles on the per-uuid result hash (redis_res).
lock = threading.RLock()
pool = redis.ConnectionPool(host=config.reids_ip, port=config.reids_port, max_connections=50, db=config.reids_db,
password=config.reids_password)
redis_ = redis.Redis(connection_pool=pool, decode_responses=True)
thanks = "致谢"
references = "参考文献"
# Download URLs are served by this same Flask process (see /download route).
flask_serves_env = "http://{}:{}".format(localhostip, config.flask_port)
paper_download_url = flask_serves_env + "/download?filename_path={}/paper.docx"
paper_start_download_url = flask_serves_env + "/download?filename_path={}/paper_start.docx"
# Per-host key bookkeeping: available api keys, and keys that failed mid-task.
redis_key_name_openaikey_bad_dict = "openaikey_bad_list_{}".format(str(localhostip))
redis_key_name_openaikey_list = "openaikey_list_{}".format(str(localhostip))
redis_title = "redis_title"            # queue of {"id", "title"} awaiting outline
redis_title_ing = "redis_title_ing"    # set of ids currently being generated
redis_small_task = "redis_small_task"  # queue of per-section sub-tasks
redis_res = "redis_res"                # hash uuid -> aggregated result json
# Seed the available-key list from config (indentation of the following two
# statements reconstructed — the diff renderer stripped it; the hset/persist
# calls are assumed to run once, outside the loop — TODO confirm).
for i in config.openaikey_list:
    redis_.rpush(redis_key_name_openaikey_list, i)
redis_.hset(redis_key_name_openaikey_bad_dict, "1", "1")
redis_.persist(redis_key_name_openaikey_list)
redis_.persist(redis_key_name_openaikey_bad_dict)
def request_api_chatgpt(api_key, prompt):
    """Send a single-turn prompt to the OpenAI chat-completions endpoint.

    Args:
        api_key: OpenAI API key placed in the Authorization header.
        prompt: user message content.

    Returns:
        The raw ``requests.Response``; callers parse ``.json()`` themselves.
    """
    endpoint = "https://api.openai.com/v1/chat/completions"
    request_headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {api_key}",
    }
    payload = {
        "model": "gpt-3.5-turbo",
        "messages": [
            {"role": "user", "content": prompt},
        ],
        "temperature": 0.5,
    }
    # Generous 600s timeout: long paper sections can take minutes to generate.
    return requests.post(endpoint,
                         headers=request_headers,
                         data=json.dumps(payload),
                         timeout=600)
def chat_kaitibaogao(api_key, uuid, main_parameter, task_type):
    """Generate the 开题报告 (opening report) for one paper task.

    Args:
        api_key: OpenAI key popped from the available-key list by the dispatcher.
        uuid: id of the paper this sub-task belongs to.
        main_parameter: ``[title]``.
        task_type: sub-task tag, recorded with the key on failure.

    On success the key is returned to the rotation; on failure the
    (key, task_type) pair is stored in the bad-key hash and an empty result
    is recorded so the task counter still advances.
    """
    try:
        response = request_api_chatgpt(api_key, config.kaitibaogao_prompt.format(main_parameter[0]))
        res = response.json()
        kaitibaogao = res["choices"][0]["message"]["content"]
        # Key worked: put it back into the rotation.
        redis_.rpush(redis_key_name_openaikey_list, api_key)
        redis_.persist(redis_key_name_openaikey_list)
    except Exception:
        # TODO: 发送警报 (send an alert) — narrowed from a bare except.
        kaitibaogao = ""
        redis_.hset(redis_key_name_openaikey_bad_dict, uuid, str((api_key, task_type)))
    # Merge the result into the shared per-uuid record under lock; the
    # try/finally guarantees the lock is released even if redis/json fails.
    lock.acquire()
    try:
        res_dict = json.loads(redis_.hget(redis_res, uuid))
        res_dict["tasking_num"] += 1
        # Bug fix: the original format string had no placeholder, so the uuid
        # was silently dropped from the progress log.
        print("{} 子任务进度".format(uuid), res_dict["tasking_num"])
        res_dict["开题报告"] = kaitibaogao
        redis_.hset(redis_res, uuid, json.dumps(res_dict, ensure_ascii=False))
    finally:
        lock.release()
def chat_abstract_keyword(api_key, uuid, main_parameter, task_type):
    """Generate abstracts and keywords (Chinese + English) for one paper.

    Args:
        api_key: OpenAI key popped from the available-key list.
        uuid: id of the paper this sub-task belongs to.
        main_parameter: ``[title]``.
        task_type: sub-task tag, recorded with the key on failure.

    Four chained completions: Chinese abstract from the title, English
    abstract translated from it, Chinese keywords from the abstract, English
    keywords translated from those.  Any failure empties all four fields.
    """
    try:
        # 生成中文摘要 (Chinese abstract from the title)
        response = request_api_chatgpt(api_key, config.chinese_abstract_prompt.format(main_parameter[0]))
        chinese_abstract = response.json()["choices"][0]["message"]["content"]
        # 生成英文的摘要 (translate the Chinese abstract)
        response = request_api_chatgpt(api_key, config.english_abstract_prompt.format(chinese_abstract))
        english_abstract = response.json()["choices"][0]["message"]["content"]
        # 生成中文关键字 (keywords from the Chinese abstract)
        response = request_api_chatgpt(api_key, config.chinese_keyword_prompt.format(chinese_abstract))
        chinese_keyword = response.json()["choices"][0]["message"]["content"]
        # 生成英文关键字 (translate the keywords)
        response = request_api_chatgpt(api_key, config.english_keyword_prompt.format(chinese_keyword))
        english_keyword = response.json()["choices"][0]["message"]["content"]
        redis_.rpush(redis_key_name_openaikey_list, api_key)
        redis_.persist(redis_key_name_openaikey_list)
    except Exception:
        # TODO: 发送警报 (send an alert) — narrowed from a bare except.
        chinese_abstract = english_abstract = chinese_keyword = english_keyword = ""
        redis_.hset(redis_key_name_openaikey_bad_dict, uuid, str((api_key, task_type)))
    paper_abstract_keyword = {
        "中文摘要": chinese_abstract,
        "英文摘要": english_abstract,
        "中文关键词": chinese_keyword,
        "英文关键词": english_keyword
    }
    # Merge into the shared per-uuid record under lock; try/finally keeps the
    # lock from leaking if redis/json fails.
    lock.acquire()
    try:
        res_dict = json.loads(redis_.hget(redis_res, uuid))
        res_dict["tasking_num"] += 1
        # Bug fix: the original format string had no placeholder for uuid.
        print("{} 子任务进度".format(uuid), res_dict["tasking_num"])
        res_dict["中文摘要"] = paper_abstract_keyword["中文摘要"]
        res_dict["英文摘要"] = paper_abstract_keyword["英文摘要"]
        res_dict["中文关键词"] = paper_abstract_keyword["中文关键词"]
        res_dict["英文关键词"] = paper_abstract_keyword["英文关键词"]
        redis_.hset(redis_res, uuid, json.dumps(res_dict, ensure_ascii=False))
    finally:
        lock.release()
def chat_content(api_key, uuid, main_parameter, task_type):
    """Generate the body text for one table-of-contents entry.

    Args:
        api_key: OpenAI key popped from the available-key list.
        uuid: id of the paper this sub-task belongs to.
        main_parameter: ``[content_index, title, mulu, subtitle, prompt]``.
        task_type: sub-task tag, recorded with the key on failure.

    A subtitle prefixed with ``"@@"`` is a heading-only entry: the prefix is
    stripped and the remainder stored verbatim, with no API call.
    """
    content_index, title, mulu, subtitle, prompt = main_parameter
    if subtitle[:2] == "@@":
        res_content = subtitle[2:]
        # Bug fix: the dispatcher popped this api_key, but the original never
        # returned it on this branch, slowly draining the key pool.
        redis_.rpush(redis_key_name_openaikey_list, api_key)
        redis_.persist(redis_key_name_openaikey_list)
    else:
        try:
            response = request_api_chatgpt(api_key, prompt.format(title, mulu, subtitle))
            res_content = response.json()["choices"][0]["message"]["content"]
            redis_.rpush(redis_key_name_openaikey_list, api_key)
            redis_.persist(redis_key_name_openaikey_list)
        except Exception:
            # TODO: 发送警报 (send an alert) — narrowed from a bare except.
            res_content = ""
            redis_.hset(redis_key_name_openaikey_bad_dict, uuid, str((api_key, task_type)))
    # Merge into the shared per-uuid record under lock; try/finally keeps the
    # lock from leaking if redis/json fails.
    lock.acquire()
    try:
        res_dict = json.loads(redis_.hget(redis_res, uuid))
        res_dict["tasking_num"] += 1
        # Bug fix: the original format string had no placeholder for uuid.
        print("{} 子任务进度".format(uuid), res_dict["tasking_num"])
        table_of_contents = res_dict["table_of_contents"]
        table_of_contents[content_index] = res_content
        res_dict["table_of_contents"] = table_of_contents
        redis_.hset(redis_res, uuid, json.dumps(res_dict, ensure_ascii=False))
    finally:
        lock.release()
def chat_thanks(api_key, uuid, main_parameter, task_type):
    """Generate the 致谢 (acknowledgements) section for one paper.

    Args:
        api_key: OpenAI key popped from the available-key list.
        uuid: id of the paper this sub-task belongs to.
        main_parameter: ``[title, thank_prompt]``.
        task_type: sub-task tag, recorded with the key on failure.
    """
    title = main_parameter[0]
    prompt = main_parameter[1]
    try:
        response = request_api_chatgpt(api_key, prompt.format(title))
        res_content = response.json()["choices"][0]["message"]["content"]
        redis_.rpush(redis_key_name_openaikey_list, api_key)
        redis_.persist(redis_key_name_openaikey_list)
    except Exception:
        # TODO: 发送警报 (send an alert) — narrowed from a bare except.
        res_content = ""
        redis_.hset(redis_key_name_openaikey_bad_dict, uuid, str((api_key, task_type)))
    # Merge into the shared per-uuid record under lock; try/finally keeps the
    # lock from leaking if redis/json fails.
    lock.acquire()
    try:
        res_dict = json.loads(redis_.hget(redis_res, uuid))
        res_dict["tasking_num"] += 1
        # Bug fix: the original format string had no placeholder for uuid.
        print("{} 子任务进度".format(uuid), res_dict["tasking_num"])
        res_dict["致谢"] = res_content
        redis_.hset(redis_res, uuid, json.dumps(res_dict, ensure_ascii=False))
    finally:
        lock.release()
def chat_references(api_key, uuid, main_parameter, task_type):
    """Generate the 参考文献 (references) section for one paper.

    Args:
        api_key: OpenAI key popped from the available-key list.
        uuid: id of the paper this sub-task belongs to.
        main_parameter: ``[title, mulu, references_prompt]``.
        task_type: sub-task tag, recorded with the key on failure.
    """
    title = main_parameter[0]
    mulu = main_parameter[1]
    prompt = main_parameter[2]
    try:
        response = request_api_chatgpt(api_key, prompt.format(title, mulu))
        res_content = response.json()["choices"][0]["message"]["content"]
        redis_.rpush(redis_key_name_openaikey_list, api_key)
        redis_.persist(redis_key_name_openaikey_list)
    except Exception:
        # TODO: 发送警报 (send an alert) — narrowed from a bare except.
        res_content = ""
        redis_.hset(redis_key_name_openaikey_bad_dict, uuid, str((api_key, task_type)))
    # 加锁 读取redis并存储结果 — merge under lock; try/finally keeps the lock
    # from leaking if redis/json fails.
    lock.acquire()
    try:
        res_dict = json.loads(redis_.hget(redis_res, uuid))
        res_dict["tasking_num"] += 1
        # Bug fix: the original format string had no placeholder for uuid.
        print("{} 子任务进度".format(uuid), res_dict["tasking_num"])
        res_dict["参考文献"] = res_content
        redis_.hset(redis_res, uuid, json.dumps(res_dict, ensure_ascii=False))
    finally:
        lock.release()
def threading_mulu(api_key, title, uuid):
    """生成目录并把任务拆解进入子任务的redis_list中和储存结果的redis_list中.

    Generate the outline (目录) for ``title``, split it into per-section
    sub-tasks pushed onto ``redis_small_task``, and initialise the per-uuid
    aggregated result record in the ``redis_res`` hash.
    """
    # 1) Ask the model for an outline.
    try:
        response = request_api_chatgpt(api_key, config.mulu_prompt.format(title))
        mulu = response.json()["choices"][0]["message"]["content"]
        redis_.rpush(redis_key_name_openaikey_list, api_key)
        redis_.persist(redis_key_name_openaikey_list)
    except Exception:
        # TODO: 发送警报 (send an alert).
        # Bug fix: the original called str(api_key, "mulu"), which treats
        # "mulu" as an encoding and raises TypeError on a str key — the bad
        # key was never recorded.  Store the tuple like the other workers.
        redis_.hset(redis_key_name_openaikey_bad_dict, uuid, str((api_key, "mulu")))
        mulu = ""
    try:
        mulu_list = [line.strip() for line in str(mulu).split("\n") if line != ""]
        print(mulu_list)
        # 2) Keep the first line, then prefix every numbered heading with
        #    "@@" so chat_content() stores it verbatim without an API call.
        #    Lines before the first second-level heading are skipped.
        cun_bool = False
        table_of_contents = [mulu_list[0]]
        for line in mulu_list[1:]:
            if re.findall(config.pantten_second_biaoti, line):
                table_of_contents.append("@@" + line)
                cun_bool = True
                continue
            if not cun_bool:
                continue
            if re.findall(config.pantten_other_biaoti, line):
                table_of_contents.append("@@" + line)
            else:
                table_of_contents.append(line)
        print(table_of_contents)
        # 致谢/参考文献 get dedicated tasks below, so drop them from the
        # per-section list if the model emitted them near the end.
        thanks_references_bool_table = table_of_contents[-3:]
        if references in thanks_references_bool_table:
            table_of_contents.remove(references)
        if thanks in thanks_references_bool_table:
            table_of_contents.remove(thanks)
        print(len(table_of_contents))
        # 3) Build the sub-task list: opening report, abstracts/keywords,
        #    one content task per section, then thanks + references.
        small_task_list = [
            {"task_type": "kaitibaogao", "uuid": uuid, "main_parameter": [title]},
            {"task_type": "chat_abstract", "uuid": uuid, "main_parameter": [title]},
        ]
        for content_index, subtitle in enumerate(table_of_contents):
            if content_index == 0:
                prompt = config.first_title_prompt
            elif subtitle == "参考文献":
                prompt = config.references_prompt
            elif subtitle == "致谢":
                prompt = config.thank_prompt
            else:
                prompt = config.small_title_prompt
            print("请求的所有参数",
                  content_index,
                  title,
                  subtitle,
                  prompt)
            small_task_list.append({
                "task_type": "paper_content",
                "uuid": uuid,
                "main_parameter": [
                    content_index,
                    title,
                    mulu,
                    subtitle,
                    prompt
                ]
            })
        small_task_list.append({
            "task_type": "thanks_task",
            "uuid": uuid,
            "main_parameter": [title, config.thank_prompt]
        })
        small_task_list.append({
            "task_type": "references_task",
            "uuid": uuid,
            "main_parameter": [title, mulu, config.references_prompt]
        })
        for small_task in small_task_list:
            redis_.rpush(redis_small_task, json.dumps(small_task, ensure_ascii=False))
        # NOTE(review): this persists the api-key list, not redis_small_task;
        # kept as-is, but it looks like it was meant for the task queue — confirm.
        redis_.persist(redis_key_name_openaikey_list)
        # 4) Initialise the aggregated result record for this uuid; workers
        #    increment tasking_num until it reaches num_small_task.
        res = {
            "uuid": uuid,
            "num_small_task": len(small_task_list),
            "tasking_num": 0,
            "标题": title,
            "目录": mulu,
            "开题报告": "",
            "任务书": "",
            "中文摘要": "",
            "英文摘要": "",
            "中文关键词": "",
            "英文关键词": "",
            "正文": "",
            "致谢": "",
            "参考文献": "",
            "table_of_contents": [""] * len(table_of_contents)
        }
        redis_.hset(redis_res, uuid, json.dumps(res, ensure_ascii=False))
    except Exception:
        print("目录程序错误")
def threading_1():
    """Outline-dispatch loop.

    While the sub-task queue is non-empty, wait for it to drain.  Otherwise,
    when both a pending title and a free api key exist, pop one of each and
    generate the outline (目录) for that paper in a background thread.
    """
    while True:
        if redis_.llen(redis_small_task) != 0:
            # Sub-tasks pending: don't start a new paper yet.
            time.sleep(1)
        elif redis_.llen(redis_title) != 0 and redis_.llen(redis_key_name_openaikey_list) != 0:
            # redis_title entries look like {"id": ..., "title": ...}.
            payload = json.loads(redis_.lpop(redis_title).decode('UTF-8'))
            free_key = redis_.lpop(redis_key_name_openaikey_list).decode('UTF-8')
            worker = Thread(target=threading_mulu,
                            args=(free_key, payload["title"], payload["id"]))
            worker.start()
        else:
            time.sleep(1)
def threading_2():
    """Sub-task dispatch loop.

    Pops an (api_key, sub-task) pair whenever both are available and runs
    the worker matching the task's ``task_type`` in a background thread.
    """
    # Dispatch table replaces the original if/elif chain; same mapping.
    handlers = {
        "kaitibaogao": chat_kaitibaogao,
        "chat_abstract": chat_abstract_keyword,
        "paper_content": chat_content,
        "thanks_task": chat_thanks,
        "references_task": chat_references,
    }
    while True:
        if redis_.llen(redis_small_task) != 0 and redis_.llen(redis_key_name_openaikey_list) != 0:
            api_key = redis_.lpop(redis_key_name_openaikey_list).decode('UTF-8')
            task = json.loads(redis_.lpop(redis_small_task).decode('UTF-8'))
            worker = handlers.get(task["task_type"])
            if worker is not None:
                Thread(target=worker,
                       args=(api_key,
                             task["uuid"],
                             task["main_parameter"],
                             task["task_type"])).start()
        else:
            time.sleep(1)
def threading_3():
    """Completion-collector loop.

    Scans the result hash once a second; any uuid whose tasking_num has
    reached num_small_task is finished: its result json is written to disk,
    its download URLs are published under its own redis key (8h TTL), and it
    is removed from the in-flight set.
    """
    while True:
        res_end_list = []
        res_dict = redis_.hgetall(redis_res)
        for key, values in res_dict.items():
            values_dict = json.loads(values)
            if int(values_dict["num_small_task"]) == int(values_dict["tasking_num"]):
                res_end_list.append(key)
        for key in res_end_list:
            redis_.hdel(redis_res, key)
            res_str = res_dict[key].decode("utf-8")
            # Bug fix: the original passed the already-serialized string to
            # json.dumps, double-encoding it so paper_content.json held one
            # giant quoted string.  Re-parse, then pretty-print.
            json_str = json.dumps(json.loads(res_str), indent=4, ensure_ascii=False)
            key = str(key, encoding="utf-8")
            uuid_path = os.path.join(config.project_data_txt_path, key)
            # exist_ok tolerates a directory left over from a previous run.
            os.makedirs(uuid_path, exist_ok=True)
            paper_content_path = os.path.join(uuid_path, "paper_content.json")
            with open(paper_content_path, 'w') as json_file:
                json_file.write(json_str)
            """
            调用jar包
            占位
            """
            url_path_paper = paper_download_url.format(key)
            url_path_kaiti = paper_start_download_url.format(key)
            return_text = str({"id": key,
                               "content_url_path": url_path_paper,
                               "content_report_url_path": url_path_kaiti,
                               "probabilities": None,
                               "status_code": 200})
            redis_.srem(redis_title_ing, key)
            redis_.set(key, return_text, 28800)
        time.sleep(1)
# def main(title):
# # print(request.remote_addr)
# # title = request.json["title"]
#
# id_ = str(uuid.uuid1())
# print(id_)
# redis_.rpush(redis_title, json.dumps({"id": id_, "title": title})) # 加入redis
@app.route("/chat", methods=["POST"])
def chat():
print(request.remote_addr)
title = request.json["title"]
id_ = str(uuid.uuid1())
print(id_)
redis_.rpush(redis_title, json.dumps({"id": id_, "title": title}, ensure_ascii=False)) # 加入redis
redis_.persist(redis_key_name_openaikey_list)
return_text = {"texts": {'id': id_, }, "probabilities": None, "status_code": 200}
print("ok")
redis_.sadd(redis_title_ing, id_)
return jsonify(return_text) # 返回结果
@app.route("/download", methods=['GET'])
def download_file():
# 需要知道2个参数, 第1个参数是本地目录的path, 第2个参数是文件名(带扩展名)
# directory = os.path.join(project_data_txt_path, filename) # 假设在当前目录
# uuid_path, word_name = str(filename).split("/")
# word_path_root = os.path.join(project_data_txt_path, uuid_path)
# response = make_response(send_from_directory(word_path_root, word_name, as_attachment=True))
# response.headers["Content-Disposition"] = "attachment; filename={}".format(filename.encode().decode('latin-1'))
filename_path = request.args.get('filename_path', '')
filename = filename_path.split("/")[1]
path_name = os.path.join(config.project_data_txt_path, filename_path)
with open(path_name, 'rb') as f:
stream = f.read()
response = Response(stream, content_type='application/octet-stream')
response.headers['Content-disposition'] = 'attachment; filename={}'.format(filename)
return response
@app.route("/search", methods=["POST"])
def search():
id_ = request.json['id'] # 获取用户query中的文本 例如"I love you"
result = redis_.get(id_) # 获取该query的模型结果
if result is not None:
# redis_.delete(id_)
# result_dict = result.decode('UTF-8')
if redis_.hexists(redis_key_name_openaikey_bad_dict, id_) == True:
result_text = {'code': "204", 'text': "", 'probabilities': None}
else:
result_dict = eval(result)
# return_text = {"id":query_id, "load_result_path": load_result_path, "probabilities": None, "status_code": 200}
query_id = result_dict["id"]
# "content_url_path": url_path_paper,
# "content_report_url_path": url_path_kaiti,
content_url_path = result_dict["content_url_path"]
content_report_url_path = result_dict["content_report_url_path"]
probabilities = result_dict["probabilities"]
result_text = {'code': 200,
'content_url_path': content_url_path,
'content_report_url_path': content_report_url_path,
'probabilities': probabilities}
else:
querying_list = list(redis_.smembers(redis_title_ing))
querying_set = set()
for i in querying_list:
querying_set.add(i.decode())
querying_bool = False
if id_ in querying_set:
querying_bool = True
query_list_json = redis_.lrange(redis_title, 0, -1)
query_set_ids = set()
for i in query_list_json:
data_dict = json.loads(i)
query_id = data_dict['id']
query_set_ids.add(query_id)
query_bool = False
if id_ in query_set_ids:
query_bool = True
if querying_bool == True and query_bool == True:
result_text = {'code': "201", 'text': "", 'probabilities': None}
elif querying_bool == True and query_bool == False:
result_text = {'code': "202", 'text': "", 'probabilities': None}
else:
result_text = {'code': "203", 'text': "", 'probabilities': None}
return jsonify(result_text) # 返回结果
# threading_1 # 根据标题获取子任务,存入子任务序列 (titles -> outline + sub-tasks)
# threading_2 # 根据子任务生成结果,存入结果序列 (sub-tasks -> content)
# threading_3 # 根据存储的结果序列,看是否完成,如果完成输出json文件以及word (collect finished papers)
t = Thread(target=threading_1)
t.start()
t = Thread(target=threading_2)
t.start()
# Bug fix: the collector described in the comment above was never started,
# so finished papers were never written to disk or published for /search.
t = Thread(target=threading_3)
t.start()

if __name__ == '__main__':
    # main("大型商业建筑人员疏散设计研究")
    app.run(host="0.0.0.0", port=14002, threaded=True, debug=False)

51
flask_serves.py

@ -26,7 +26,7 @@ import re
import urllib.parse as pa
pool = redis.ConnectionPool(host='localhost', port=6379, max_connections=50, db=1)
pool = redis.ConnectionPool(host='localhost', port=63179, max_connections=50, db=1, password='Zhicheng123*')
redis_ = redis.Redis(connection_pool=pool, decode_responses=True)
db_key_query = 'query'
@ -40,12 +40,16 @@ import logging
lock = threading.RLock()
mulu_prompt = "请帮我根据题目为“{}”生成一个论文目录"
first_title_prompt = "论文题目是“{}”,目录是“{}”,请把其中的大标题“{}”的内容续写完整,保证续写内容不少于1000字"
small_title_prompt = "论文题目是“{}”,目录是“{}”,请把其中的小标题“{}”的内容续写完整,保证续写内容不少于1000字"
references_prompt = "论文题目是“{}”,目录是“{}”,请为这篇论文生成中文的{}"
first_title_prompt = "论文题目是“{}”,目录是“{}”,请把其中的大标题“{}”的内容续写完整,保证续写内容不少于800字"
small_title_prompt = "论文题目是“{}”,目录是“{}”,请把其中的小标题“{}”的内容续写完整,保证续写内容不少于800字"
references_prompt = "论文题目是“{}”,目录是“{}”,请为这篇论文生成15篇中文的{},要求其中有有中文参考文献不低于12篇,英文参考文献不低于2篇"
thank_prompt = "论文题目是“{}”,目录是“{}”,请把其中的{}部分续写完整"
kaitibaogao_prompt = "请以《{}》为题目生成研究的主要的内容、背景、目的、意义,要求不少于1500字"
chinese_abstract_prompt = "请以《{}》为题目生成论文摘要,要求不少于500字"
english_abstract_prompt = "请把“{}”这段文字翻译成英文"
chinese_keyword_prompt = "请为“{}”这段论文摘要生成3-5个关键字"
english_keyword_prompt = "请把“{}”这几个关键字翻译成英文"
thanks = "致谢"
dabiaoti = ["","","","","","","",""]
# 正则
@ -57,7 +61,14 @@ project_data_txt_path = "/home/majiahui/ChatGPT_Sever/new_data_txt"
api_key_list = ["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",
"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",
"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",
"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"]
"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",
"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",
"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",
"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",
"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",
"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"]
# "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"
def chat_title(title, api_key):
global lock
@ -82,6 +93,7 @@ def chat_title(title, api_key):
def chat_kaitibaogao(title, api_key, uuid_path):
global lock
# time.sleep(1)
openai.api_key = api_key
res = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
@ -148,7 +160,10 @@ def classify(): # 调用模型,设置最大batch_size
# 生成开题报告
# title, api_key, uuid_path
lock.acquire()
api_key = api_key_list.pop()
lock.release()
t = Thread(target=chat_kaitibaogao, args=(title,
api_key,
uuid_path,
@ -198,15 +213,26 @@ def classify(): # 调用模型,设置最大batch_size
print(table_of_contents)
# table_of_contents = table_of_contents[:3] + table_of_contents[-1:]
# print(table_of_contents)
thanks_bool_table = table_of_contents[-3:]
if thanks not in thanks_bool_table:
table_of_contents.insert(-1, "致谢")
chat_class = GeneratePaper(mulu_list, table_of_contents)
print(len(table_of_contents))
############################################################
while True:
if api_key_list == []:
time.sleep(1)
continue
if index == len(table_of_contents):
break
lock.acquire()
api_key = api_key_list.pop()
lock.release()
subtitle = table_of_contents[index]
if index == 0:
prompt = first_title_prompt
@ -215,18 +241,17 @@ def classify(): # 调用模型,设置最大batch_size
elif subtitle == "致谢":
prompt = thank_prompt
else:
prompt = first_title_prompt
prompt = small_title_prompt
print("请求的所有参数", api_key,
index,
title,
mulu_list,
subtitle,
prompt)
t = Thread(target=chat_class.chat_content_, args=(api_key,
index,
title,
mulu_list,
mulu,
subtitle,
prompt))
t.start()
@ -278,8 +303,8 @@ def classify(): # 调用模型,设置最大batch_size
os.system("java -Dfile.encoding=UTF-8 -jar '/home/majiahui/ChatGPT_Sever/createAiXieZuoKaitiWord.jar' '{}' '{}'".format(
kaitibaogao_txt_path, save_word_paper_start))
url_path_paper = "http://104.244.90.248:14000/download?filename_path={}/paper.docx".format(query_id)
url_path_kaiti = "http://104.244.90.248:14000/download?filename_path={}/paper_start.docx".format(query_id)
url_path_paper = "http://104.244.89.190:14000/download?filename_path={}/paper.docx".format(query_id)
url_path_kaiti = "http://104.244.89.190:14000/download?filename_path={}/paper_start.docx".format(query_id)
# content_path = os.path.join(uuid_path, "content.txt")
# load_result_path = res_path.format(query_id)
# load_result_path = os.path.abspath(load_result_path)
@ -378,8 +403,8 @@ def search():
result_text = {'code': "203", 'text': "", 'probabilities': None}
return jsonify(result_text) # 返回结果
t = Thread(target=classify)
t.start()
t1 = Thread(target=classify)
t1.start()
if __name__ == "__main__":
app.run(host="0.0.0.0", port=14000, threaded=True, debug=False)

725
flask_sever_1.py

@ -14,6 +14,11 @@ import base64
import re
import urllib.parse as pa
import socket
from serve_config_1 import Config
import requests
config = Config()
app = Flask(__name__)
app.config["JSON_AS_ASCII"] = False
@ -23,71 +28,18 @@ s.connect(("8.8.8.8", 80))
localhostip = s.getsockname()[0]
lock = threading.RLock()
pool = redis.ConnectionPool(host='localhost', port=63179, max_connections=50, db=2, password='Zhicheng123*')
pool = redis.ConnectionPool(host=config.reids_ip, port=config.reids_port, max_connections=50, db=config.reids_db, password=config.reids_password)
redis_ = redis.Redis(connection_pool=pool, decode_responses=True)
pantten_second_biaoti = '[2二ⅡⅠ][、.]\s{0,}?[\u4e00-\u9fa5]+'
pantten_other_biaoti = '[2-9二三四五六七八九ⅡⅢⅣⅤⅥⅦⅧⅨ][、.]\s{0,}?[\u4e00-\u9fa5]+'
mulu_prompt = "请帮我根据题目为“{}”生成一个论文目录"
first_title_prompt = "论文题目是“{}”,目录是“{}”,请把其中的大标题“{}”的内容续写完整,保证续写内容不少于800字"
small_title_prompt = "论文题目是“{}”,目录是“{}”,请把其中的小标题“{}”的内容续写完整,保证续写内容不少于800字"
references_prompt = "论文题目是“{}”,目录是“{}”,请为这篇论文生成15篇左右的参考文献,要求其中有有中文参考文献不低于12篇,英文参考文献不低于2篇"
thank_prompt = "请以“{}”为题写一篇论文的致谢"
kaitibaogao_prompt = "请以《{}》为题目生成研究的主要的内容、背景、目的、意义,要求不少于100字"
chinese_abstract_prompt = "请以《{}》为题目生成论文摘要,要求不少于1500字"
english_abstract_prompt = "请把“{}”这段文字翻译成英文"
chinese_keyword_prompt = "请为“{}”这段论文摘要生成3-5个关键字"
english_keyword_prompt = "请把“{}”这几个关键字翻译成英文"
thanks = "致谢"
references = "参考文献"
dabiaoti = ["", "", "", "", "", "", "", ""]
project_data_txt_path = "/home/majiahui/ChatGPT_Sever/new_data_txt"
"""
key_list = [
{"ip": key-api},
{"ip": key-api},
{"ip": key-api},
]
redis_title = []
redis_title_ing = []
redis_small_task = [
{
uuid,
api_key,
mulu_title_id,
title,
mulu,
subtitle,
prompt
}
]
redis_res = [
{
"uuid":
"完成进度":
"标题":
"中文摘要":"",
"英文摘要"
"中文关键字"
"英文关键字"
"正文" : [""] * len(content)
}
] -
> list()
"""
openaikey_list = ["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",
"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",
"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",
"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",
"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",
"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",
"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",
"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",
"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"]
flask_serves_env = "http://{}:{}".format(localhostip,config.flask_port)
paper_download_url = flask_serves_env + "/download?filename_path={}/paper.docx"
paper_start_download_url = flask_serves_env + "/download?filename_path={}/paper_start.docx"
redis_key_name_openaikey_bad_dict = "openaikey_bad_list_{}".format(str(localhostip))
redis_key_name_openaikey_list = "openaikey_list_{}".format(str(localhostip))
@ -99,82 +51,102 @@ redis_small_task = "redis_small_task"
redis_res = "redis_res"
for i in openaikey_list:
for i in config.openaikey_list:
redis_.rpush(redis_key_name_openaikey_list, i)
redis_.hset(redis_key_name_openaikey_bad_dict, "1", "1")
redis_.persist(redis_key_name_openaikey_list)
redis_.persist(redis_key_name_openaikey_bad_dict)
def chat_kaitibaogao(api_key, uuid, main_parameter):
# t = Thread(target=chat_kaitibaogao, args=(api_key,
# uuid,
# main_parameter
# time.sleep(1)
openai.api_key = api_key
res = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[
{"role": "user", "content": kaitibaogao_prompt.format(main_parameter[0])},
def request_api_chatgpt(api_key, prompt):
OPENAI_API_KEY = api_key
url = "https://api.openai.com/v1/chat/completions"
headers = {
"Content-Type": "application/json",
"Authorization": f"Bearer {OPENAI_API_KEY}"
}
data = {
"model": "gpt-3.5-turbo",
"messages": [
{"role": "user", "content": prompt},
],
temperature=0.5
)
kaitibaogao = res.choices[0].message.content
# kaitibaogao_path = os.path.join(, "kaitibaogao.txt")
# with open(kaitibaogao_path, 'w', encoding='utf8') as f_kaitibaogao:
# f_kaitibaogao.write(kaitibaogao)
redis_.rpush(redis_key_name_openaikey_list, api_key)
"temperature": 0.5
}
response = requests.post(url,
headers=headers,
data=json.dumps(data),
timeout=600)
return response
def chat_kaitibaogao(api_key, uuid, main_parameter, task_type):
    """Generate the 开题报告 (opening report) for one paper task and record it in redis.

    Calls the ChatGPT API with the kaitibaogao prompt built from the paper
    title (``main_parameter[0]``). On success the api_key is returned to the
    available-key redis list; on any failure the (key, task_type) pair is
    recorded in the bad-key hash so the task can be diagnosed/retried.
    Finally the shared result hash entry for ``uuid`` is updated under the
    module lock: progress counter bumped and the report text stored.

    :param api_key: OpenAI API key checked out from the redis key pool.
    :param uuid: task id — hash field in the shared ``redis_res`` result hash.
    :param main_parameter: list whose first element is the paper title.
    :param task_type: task label stored alongside the key on failure.
    """
    try:
        response = request_api_chatgpt(api_key, config.kaitibaogao_prompt.format(main_parameter[0]))
        res = response.json()
        kaitibaogao = res["choices"][0]["message"]["content"]
        # Key worked: put it back into the available pool and clear any TTL.
        redis_.rpush(redis_key_name_openaikey_list, api_key)
        redis_.persist(redis_key_name_openaikey_list)
    except Exception:
        # TODO: 发送警报 (send an alert). Do not swallow system-exiting
        # exceptions — catch Exception, not a bare except.
        kaitibaogao = ""
        redis_.hset(redis_key_name_openaikey_bad_dict, uuid, str((api_key, task_type)))
    # Read-modify-write of the shared result entry must be atomic w.r.t.
    # the other worker threads; RLock supports the context-manager protocol,
    # which also guarantees release if json.loads/hget raises.
    with lock:
        res_dict_str = redis_.hget(redis_res, uuid)
        res_dict = json.loads(res_dict_str)
        res_dict["tasking_num"] += 1
        # Original used "子任务进度".format(uuid), which discarded the uuid.
        print("{} 子任务进度".format(uuid), res_dict["tasking_num"])
        res_dict["开题报告"] = kaitibaogao
        redis_.hset(redis_res, uuid, json.dumps(res_dict, ensure_ascii=False))
def chat_abstract_keyword(api_key, uuid, main_parameter):
# api_key,
# uuid,
# main_parameter
# time.sleep(7)
openai.api_key = api_key
# 生成中文摘要
res = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[
{"role": "user", "content": chinese_abstract_prompt.format(main_parameter[0])},
],
temperature=0.5
)
chinese_abstract = res.choices[0].message.content
# 生成英文的摘要
res = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[
{"role": "user", "content": english_abstract_prompt.format(chinese_abstract)},
],
temperature=0.5
)
english_abstract = res.choices[0].message.content
# 生成中文关键字
res = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[
{"role": "user", "content": chinese_keyword_prompt.format(chinese_abstract)},
],
temperature=0.5
)
chinese_keyword = res.choices[0].message.content
# 生成英文关键字
res = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[
{"role": "user", "content": english_keyword_prompt.format(chinese_keyword)},
],
temperature=0.5
)
def chat_abstract_keyword(api_key, uuid, main_parameter, task_type):
try:
# 生成中文摘要
response =request_api_chatgpt(api_key, config.chinese_abstract_prompt.format(main_parameter[0]))
res = response.json()
chinese_abstract = res["choices"][0]["message"]["content"]
# 生成英文的摘要
response = request_api_chatgpt(api_key, config.english_abstract_prompt.format(chinese_abstract))
res = response.json()
english_abstract = res["choices"][0]["message"]["content"]
english_keyword = res.choices[0].message.content
# 生成中文关键字
response = request_api_chatgpt(api_key, config.chinese_keyword_prompt.format(chinese_abstract))
res = response.json()
chinese_keyword = res["choices"][0]["message"]["content"]
# 生成英文关键字
response = request_api_chatgpt(api_key, config.english_keyword_prompt.format(chinese_keyword))
res = response.json()
english_keyword = res["choices"][0]["message"]["content"]
redis_.rpush(redis_key_name_openaikey_list, api_key)
redis_.persist(redis_key_name_openaikey_list)
except:
"""
发送警报
"""
chinese_abstract = ""
english_abstract = ""
chinese_keyword = ""
english_keyword = ""
redis_.hset(redis_key_name_openaikey_bad_dict, uuid, str((api_key,task_type)))
paper_abstract_keyword = {
"中文摘要": chinese_abstract,
@ -191,11 +163,12 @@ def chat_abstract_keyword(api_key, uuid, main_parameter):
# lock.acquire()
# api_key_list.append(api_key)
# lock.release()
redis_.rpush(redis_key_name_openaikey_list, api_key)
lock.acquire()
res_dict_str = redis_.hget(redis_res, uuid)
res_dict = json.loads(res_dict_str)
res_dict["tasking_num"] += 1
print("子任务进度".format(uuid),res_dict["tasking_num"])
res_dict["中文摘要"] = paper_abstract_keyword["中文摘要"]
res_dict["英文摘要"] = paper_abstract_keyword["英文摘要"]
res_dict["中文关键词"] = paper_abstract_keyword["中文关键词"]
@ -205,7 +178,7 @@ def chat_abstract_keyword(api_key, uuid, main_parameter):
lock.release()
def chat_content(api_key, uuid, main_parameter):
def chat_content(api_key, uuid, main_parameter, task_type):
'''
:param api_key:
@ -222,20 +195,26 @@ def chat_content(api_key, uuid, main_parameter):
if subtitle[:2] == "@@":
res_content = subtitle[2:]
else:
openai.api_key = api_key
res = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[
{"role": "user", "content": prompt.format(title, mulu, subtitle)},
],
temperature=0.5
)
res_content = res.choices[0].message.content
redis_.rpush(redis_key_name_openaikey_list, api_key)
try:
response = request_api_chatgpt(api_key, prompt.format(title, mulu, subtitle))
res = response.json()
res_content = res["choices"][0]["message"]["content"]
redis_.rpush(redis_key_name_openaikey_list, api_key)
redis_.persist(redis_key_name_openaikey_list)
except:
"""
发送警报
"""
res_content = ""
redis_.hset(redis_key_name_openaikey_bad_dict, uuid, str((api_key,task_type)))
lock.acquire()
res_dict_str = redis_.hget(redis_res, uuid)
res_dict = json.loads(res_dict_str)
res_dict["tasking_num"] += 1
print("子任务进度".format(uuid), res_dict["tasking_num"])
table_of_contents = res_dict["table_of_contents"]
table_of_contents[content_index] = res_content
res_dict["table_of_contents"] = table_of_contents
@ -244,7 +223,7 @@ def chat_content(api_key, uuid, main_parameter):
lock.release()
def chat_thanks(api_key, uuid, main_parameter):
def chat_thanks(api_key, uuid, main_parameter, task_type):
'''
:param api_key:
@ -257,31 +236,32 @@ def chat_thanks(api_key, uuid, main_parameter):
title = main_parameter[0]
prompt = main_parameter[1]
openai.api_key = api_key
res = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[
{"role": "user", "content": prompt.format(title)},
],
temperature=0.5
)
res_content = res.choices[0].message.content
redis_.rpush(redis_key_name_openaikey_list, api_key)
# "致谢": "",
# "参考文献": "",
# 加锁 读取redis生成致谢并存储
try:
response = request_api_chatgpt(api_key, prompt.format(title))
res = response.json()
res_content = res["choices"][0]["message"]["content"]
redis_.rpush(redis_key_name_openaikey_list, api_key)
redis_.persist(redis_key_name_openaikey_list)
except:
"""
发送警报
"""
res_content = ""
redis_.hset(redis_key_name_openaikey_bad_dict, uuid, str((api_key,task_type)))
lock.acquire()
res_dict_str = redis_.hget(redis_res, uuid)
res_dict = json.loads(res_dict_str)
res_dict["tasking_num"] += 1
print("子任务进度".format(uuid), res_dict["tasking_num"])
res_dict["致谢"] = res_content
res_dict_str = json.dumps(res_dict, ensure_ascii=False)
redis_.hset(redis_res, uuid, res_dict_str)
lock.release()
def chat_references(api_key, uuid, main_parameter):
def chat_references(api_key, uuid, main_parameter, task_type):
'''
:param api_key:
@ -295,195 +275,205 @@ def chat_references(api_key, uuid, main_parameter):
title = main_parameter[0]
mulu = main_parameter[1]
prompt = main_parameter[2]
try:
response = request_api_chatgpt(api_key, prompt.format(title, mulu))
res = response.json()
res_content = res["choices"][0]["message"]["content"]
openai.api_key = api_key
res = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[
{"role": "user", "content": prompt.format(title, mulu)},
],
temperature=0.5
)
res_content = res.choices[0].message.content
redis_.rpush(redis_key_name_openaikey_list, api_key)
redis_.rpush(redis_key_name_openaikey_list, api_key)
redis_.persist(redis_key_name_openaikey_list)
except:
"""
发送警报
"""
res_content = ""
redis_.hset(redis_key_name_openaikey_bad_dict, uuid, str((api_key,task_type)))
# 加锁 读取resis并存储结果
lock.acquire()
res_dict_str = redis_.hget(redis_res, uuid)
res_dict = json.loads(res_dict_str)
res_dict["tasking_num"] += 1
print("子任务进度".format(uuid), res_dict["tasking_num"])
res_dict["参考文献"] = res_content
res_dict_str = json.dumps(res_dict, ensure_ascii=False)
redis_.hset(redis_res, uuid, res_dict_str)
lock.release()
def threading_mulu(key_api, title, uuid):
def threading_mulu(api_key, title, uuid):
'''
生成目录并吧任务拆解进入子任务的redis_list中和储存结果的redis_list中
:return:
'''
openai.api_key = key_api
res = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[
{"role": "user", "content": mulu_prompt.format(title)},
],
temperature=0.5
)
redis_.rpush(redis_key_name_openaikey_list, key_api)
mulu = res.choices[0].message.content
mulu_list = str(mulu).split("\n")
mulu_list = [i.strip() for i in mulu_list if i != ""]
print(mulu_list)
cun_bool = False
table_of_contents = [mulu_list[0]]
for i in mulu_list[1:]:
result_second_biaoti_list = re.findall(pantten_second_biaoti, i)
result_other_biaoti_list = re.findall(pantten_other_biaoti, i)
if result_second_biaoti_list != []:
table_of_contents.append("@@" + i)
cun_bool = True
continue
if cun_bool == False:
continue
else:
if result_other_biaoti_list != []:
try:
response = request_api_chatgpt(api_key, config.mulu_prompt.format(title))
res = response.json()
mulu = res["choices"][0]["message"]["content"]
redis_.rpush(redis_key_name_openaikey_list, api_key)
redis_.persist(redis_key_name_openaikey_list)
except:
"""
发送警报
"""
res_content = ""
redis_.hset(redis_key_name_openaikey_bad_dict, uuid, str(api_key,"mulu"))
mulu = ""
try:
mulu_list = str(mulu).split("\n")
mulu_list = [i.strip() for i in mulu_list if i != ""]
print(mulu_list)
cun_bool = False
table_of_contents = [mulu_list[0]]
for i in mulu_list[1:]:
result_second_biaoti_list = re.findall(config.pantten_second_biaoti, i)
result_other_biaoti_list = re.findall(config.pantten_other_biaoti, i)
if result_second_biaoti_list != []:
table_of_contents.append("@@" + i)
cun_bool = True
continue
if cun_bool == False:
continue
else:
table_of_contents.append(i)
print(table_of_contents)
# table_of_contents = table_of_contents[:3] + table_of_contents[-1:]
# print(table_of_contents)
thanks_references_bool_table = table_of_contents[-3:]
# thanks = "致谢"
# references = "参考文献"
if references in thanks_references_bool_table:
table_of_contents.remove(references)
if thanks in thanks_references_bool_table:
table_of_contents.remove(thanks)
# table_of_contents.append(thanks)
# table_of_contents.append(references)
# if thanks not in thanks_bool_table:
# table_of_contents.insert(-1, "致谢")
#
# if thanks not in thanks_bool_table:
# table_of_contents.insert(-1, "致谢")
print(len(table_of_contents))
small_task_list = []
# api_key,
# index,
# title,
# mulu,
# subtitle,
# prompt
kaitibaogao_task = {
"task_type": "kaitibaogao",
"uuid": uuid,
"main_parameter": [title]
}
if result_other_biaoti_list != []:
table_of_contents.append("@@" + i)
else:
table_of_contents.append(i)
print(table_of_contents)
# table_of_contents = table_of_contents[:3] + table_of_contents[-1:]
# print(table_of_contents)
thanks_references_bool_table = table_of_contents[-3:]
# thanks = "致谢"
# references = "参考文献"
if references in thanks_references_bool_table:
table_of_contents.remove(references)
if thanks in thanks_references_bool_table:
table_of_contents.remove(thanks)
# table_of_contents.append(thanks)
# table_of_contents.append(references)
# if thanks not in thanks_bool_table:
# table_of_contents.insert(-1, "致谢")
#
# if thanks not in thanks_bool_table:
# table_of_contents.insert(-1, "致谢")
print(len(table_of_contents))
small_task_list = []
# api_key,
# index,
# title,
# mulu,
# subtitle,
# prompt
kaitibaogao_task = {
"task_type": "kaitibaogao",
"uuid": uuid,
"main_parameter": [title]
}
chat_abstract_task = {
"task_type": "chat_abstract",
"uuid": uuid,
"main_parameter": [title]
}
small_task_list.append(kaitibaogao_task)
small_task_list.append(chat_abstract_task)
content_index = 0
while True:
if content_index == len(table_of_contents):
break
subtitle = table_of_contents[content_index]
if content_index == 0:
prompt = first_title_prompt
elif subtitle == "参考文献":
prompt = references_prompt
elif subtitle == "致谢":
prompt = thank_prompt
else:
prompt = small_title_prompt
print("请求的所有参数",
content_index,
title,
subtitle,
prompt)
paper_content = {
"task_type": "paper_content",
chat_abstract_task = {
"task_type": "chat_abstract",
"uuid": uuid,
"main_parameter": [title]
}
small_task_list.append(kaitibaogao_task)
small_task_list.append(chat_abstract_task)
content_index = 0
while True:
if content_index == len(table_of_contents):
break
subtitle = table_of_contents[content_index]
if content_index == 0:
prompt = config.first_title_prompt
elif subtitle == "参考文献":
prompt = config.references_prompt
elif subtitle == "致谢":
prompt = config.thank_prompt
else:
prompt = config.small_title_prompt
print("请求的所有参数",
content_index,
title,
subtitle,
prompt)
paper_content = {
"task_type": "paper_content",
"uuid": uuid,
"main_parameter": [
content_index,
title,
mulu,
subtitle,
prompt
]
}
small_task_list.append(paper_content)
content_index += 1
thanks_task = {
"task_type": "thanks_task",
"uuid": uuid,
"main_parameter": [
content_index,
title,
mulu,
subtitle,
prompt
config.thank_prompt
]
}
small_task_list.append(paper_content)
content_index += 1
thanks_task = {
"task_type": "thanks_task",
"uuid": uuid,
"main_parameter": [
title,
thank_prompt
]
}
references_task = {
"task_type": "references_task",
"uuid": uuid,
"main_parameter": [
references_task = {
"task_type": "references_task",
"uuid": uuid,
"main_parameter": [
title,
mulu,
references_prompt
config.references_prompt
]
}
}
small_task_list.append(thanks_task)
small_task_list.append(references_task)
for small_task in small_task_list:
small_task = json.dumps(small_task, ensure_ascii=False)
redis_.rpush(redis_small_task, small_task)
res = {
"uuid": uuid,
"num_small_task": len(small_task_list),
"tasking_num": 0,
"标题": title,
"目录": mulu,
"开题报告": "",
"任务书": "",
"中文摘要": "",
"英文摘要": "",
"中文关键词": "",
"英文关键词": "",
"正文": "",
"致谢": "",
"参考文献": "",
"table_of_contents": [""] * len(table_of_contents)
}
small_task_list.append(thanks_task)
small_task_list.append(references_task)
res = json.dumps(res, ensure_ascii=False)
redis_.hset(redis_res, uuid, res)
for small_task in small_task_list:
small_task = json.dumps(small_task, ensure_ascii=False)
redis_.rpush(redis_small_task, small_task)
redis_.persist(redis_key_name_openaikey_list)
res = {
"uuid": uuid,
"num_small_task": len(small_task_list),
"tasking_num": 0,
"标题": title,
"目录": mulu,
"开题报告": "",
"任务书": "",
"中文摘要": "",
"英文摘要": "",
"中文关键词": "",
"英文关键词": "",
"正文": "",
"致谢": "",
"参考文献": "",
"table_of_contents": [""] * len(table_of_contents)
}
res = json.dumps(res, ensure_ascii=False)
redis_.hset(redis_res, uuid, res)
except:
print("目录程序错误")
def threading_1():
@ -542,32 +532,32 @@ def threading_2():
if task_type == "kaitibaogao":
t = Thread(target=chat_kaitibaogao, args=(api_key,
uuid,
main_parameter
))
main_parameter,
task_type))
t.start()
elif task_type == "chat_abstract":
t = Thread(target=chat_abstract_keyword, args=(api_key,
uuid,
main_parameter
))
main_parameter,
task_type))
t.start()
elif task_type == "paper_content":
t = Thread(target=chat_content, args=(api_key,
uuid,
main_parameter
))
main_parameter,
task_type))
t.start()
elif task_type == "thanks_task":
t = Thread(target=chat_thanks, args=(api_key,
uuid,
main_parameter
))
uuid,
main_parameter,
task_type))
t.start()
elif task_type == "references_task":
t = Thread(target=chat_references, args=(api_key,
uuid,
main_parameter
))
uuid,
main_parameter,
task_type))
t.start()
else:
time.sleep(1)
@ -584,36 +574,36 @@ def threading_3():
# "tasking_num": 0,
if int(values_dict["num_small_task"]) == int(values_dict["tasking_num"]):
res_end_list.append(key)
for key in res_end_list:
redis_.hdel(redis_res, key)
res_str = res_dict[key].decode("utf-8")
json_str = json.dumps(res_str, indent=4, ensure_ascii=False)
key = str(key, encoding="utf-8")
uuid_path = os.path.join(project_data_txt_path, key)
os.makedirs(uuid_path)
paper_content_path = os.path.join(uuid_path, "paper_content.json")
with open(paper_content_path, 'w') as json_file:
json_file.write(json_str)
"""
调用jar包
占位
"""
url_path_paper = "http://104.244.90.248:14000/download?filename_path={}/paper.docx".format(key)
url_path_kaiti = "http://104.244.90.248:14000/download?filename_path={}/paper_start.docx".format(key)
return_text = str({"id": key,
"content_url_path": url_path_paper,
"content_report_url_path": url_path_kaiti,
"probabilities": None,
"status_code": 200})
redis_.srem(redis_title_ing, key)
redis_.set(key, return_text, 28800)
if res_end_list != []:
for key in res_end_list:
redis_.hdel(redis_res, key)
res_str = res_dict[key].decode("utf-8")
json_str = json.dumps(res_str, indent=4, ensure_ascii=False)
key = str(key, encoding="utf-8")
uuid_path = os.path.join(config.project_data_txt_path, key)
os.makedirs(uuid_path)
paper_content_path = os.path.join(uuid_path, "paper_content.json")
with open(paper_content_path, 'w') as json_file:
json_file.write(json_str)
"""
调用jar包
占位
"""
url_path_paper = paper_download_url.format(key)
url_path_kaiti = paper_start_download_url.format(key)
return_text = str({"id": key,
"content_url_path": url_path_paper,
"content_report_url_path": url_path_kaiti,
"probabilities": None,
"status_code": 200})
redis_.srem(redis_title_ing, key)
redis_.set(key, return_text, 28800)
time.sleep(1)
@ -627,15 +617,15 @@ def threading_3():
# redis_.rpush(redis_title, json.dumps({"id": id_, "title": title})) # 加入redis
@app.route("/chat", methods=["POST"])
def chat():
print(request.remote_addr)
title = request.json["title"]
id_ = str(uuid.uuid1())
print(id_)
redis_.rpush(redis_title, json.dumps({"id":id_, "title": title})) # 加入redis
return_text = {"texts": {'id': id_,}, "probabilities": None, "status_code": 200}
redis_.rpush(redis_title, json.dumps({"id": id_, "title": title},ensure_ascii=False)) # 加入redis
redis_.persist(redis_key_name_openaikey_list)
return_text = {"texts": {'id': id_, }, "probabilities": None, "status_code": 200}
print("ok")
redis_.sadd(redis_title_ing, id_)
@ -653,12 +643,11 @@ def download_file():
# response.headers["Content-Disposition"] = "attachment; filename={}".format(filename.encode().decode('latin-1'))
filename_path = request.args.get('filename_path', '')
filename = filename_path.split("/")[1]
path_name = os.path.join(project_data_txt_path, filename_path)
path_name = os.path.join(config.project_data_txt_path, filename_path)
with open(path_name, 'rb') as f:
stream = f.read()
response = Response(stream, content_type='application/octet-stream')
response.headers['Content-disposition'] = 'attachment; filename={}'.format(filename)
return response
@ -669,19 +658,22 @@ def search():
if result is not None:
# redis_.delete(id_)
# result_dict = result.decode('UTF-8')
if redis_.hexists(redis_key_name_openaikey_bad_dict, id_) == True:
result_text = {'code': "204", 'text': "", 'probabilities': None}
result_dict = eval(result)
# return_text = {"id":query_id, "load_result_path": load_result_path, "probabilities": None, "status_code": 200}
query_id = result_dict["id"]
# "content_url_path": url_path_paper,
# "content_report_url_path": url_path_kaiti,
content_url_path = result_dict["content_url_path"]
content_report_url_path = result_dict["content_report_url_path"]
probabilities = result_dict["probabilities"]
result_text = {'code': 200,
'content_url_path': content_url_path,
'content_report_url_path': content_report_url_path,
'probabilities': probabilities}
else:
result_dict = eval(result)
# return_text = {"id":query_id, "load_result_path": load_result_path, "probabilities": None, "status_code": 200}
query_id = result_dict["id"]
# "content_url_path": url_path_paper,
# "content_report_url_path": url_path_kaiti,
content_url_path = result_dict["content_url_path"]
content_report_url_path = result_dict["content_report_url_path"]
probabilities = result_dict["probabilities"]
result_text = {'code': 200,
'content_url_path': content_url_path,
'content_report_url_path': content_report_url_path,
'probabilities': probabilities}
else:
querying_list = list(redis_.smembers(redis_title_ing))
querying_set = set()
@ -722,7 +714,6 @@ t.start()
t = Thread(target=threading_3)
t.start()
if __name__ == '__main__':
# main("大型商业建筑人员疏散设计研究")
app.run(host="0.0.0.0", port=14002, threaded=True, debug=False)

11
json_模板.json

@ -0,0 +1,11 @@
{
"标题":"",
"目录":"",
"中文摘要":"",
"英文摘要":"",
"正文": ["一、绪论",
"1.1 研究背景\n在大型商业建筑人员疏散管理中,建立科学的管理制度是非常必要的。1.2 研究背景\n在大型商业建筑人员疏散管理中,建立科学的管理制度是非常必要的。",
"二、机械手臂的设计与分析",
"二、机械手臂的设计与分析"
]
}

203
lock_flask.py

@ -8,6 +8,12 @@
@Describe:
"""
## TODO 输入列表问题
## TODO 服务停止在启动redis回滚问题
## TODO 多线程问题
## TODO ip地址问题
## TODO 请求国内地址
import os
from flask import Flask, jsonify, Response
from flask import request
@ -26,7 +32,7 @@ import re
import urllib.parse as pa
pool = redis.ConnectionPool(host='localhost', port=6379, max_connections=50, db=2)
pool = redis.ConnectionPool(host='localhost', port=6379, max_connections=50, db=1)
redis_ = redis.Redis(connection_pool=pool, decode_responses=True)
db_key_query = 'query'
@ -40,12 +46,16 @@ import logging
lock = threading.RLock()
mulu_prompt = "请帮我根据题目为“{}”生成一个论文目录"
first_title_prompt = "论文题目是“{}”,目录是“{}”,请把其中的大标题“{}”的内容续写完整,保证续写内容不少于1000字"
small_title_prompt = "论文题目是“{}”,目录是“{}”,请把其中的小标题“{}”的内容续写完整,保证续写内容不少于1000字"
references_prompt = "论文题目是“{}”,目录是“{}”,请为这篇论文生成中文的{}"
first_title_prompt = "论文题目是“{}”,目录是“{}”,请把其中的大标题“{}”的内容续写完整,保证续写内容不少于800字"
small_title_prompt = "论文题目是“{}”,目录是“{}”,请把其中的小标题“{}”的内容续写完整,保证续写内容不少于800字"
references_prompt = "论文题目是“{}”,目录是“{}”,请为这篇论文生成15篇中文的{},要求其中有有中文参考文献不低于12篇,英文参考文献不低于2篇"
thank_prompt = "论文题目是“{}”,目录是“{}”,请把其中的{}部分续写完整"
kaitibaogao_prompt = "请以《{}》为题目生成研究的主要的内容、背景、目的、意义,要求不少于1500字"
chinese_abstract_prompt = "请以《{}》为题目生成论文摘要,要求不少于500字"
english_abstract_prompt = "请把“{}”这段文字翻译成英文"
chinese_keyword_prompt = "请为“{}”这段论文摘要生成3-5个关键字"
english_keyword_prompt = "请把“{}”这几个关键字翻译成英文"
thanks = "致谢"
dabiaoti = ["","","","","","","",""]
# 正则
@ -57,7 +67,14 @@ project_data_txt_path = "/home/majiahui/ChatGPT_Sever/new_data_txt"
api_key_list = ["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",
"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",
"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",
"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"]
"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",
"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",
"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",
"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",
"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",
"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"]
# "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"
def chat_title(title, api_key):
global lock
@ -82,6 +99,7 @@ def chat_title(title, api_key):
def chat_kaitibaogao(title, api_key, uuid_path):
global lock
# time.sleep(1)
openai.api_key = api_key
res = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
@ -99,6 +117,66 @@ def chat_kaitibaogao(title, api_key, uuid_path):
lock.release()
def chat_abstract_keyword(title, api_key, uuid_path):
global lock
# time.sleep(7)
openai.api_key = api_key
# 生成中文摘要
res = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[
{"role": "user", "content": chinese_abstract_prompt.format(title)},
],
temperature=0.5
)
chinese_abstract = res.choices[0].message.content
# 生成英文的摘要
res = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[
{"role": "user", "content": english_abstract_prompt.format(chinese_abstract)},
],
temperature=0.5
)
english_abstract = res.choices[0].message.content
# 生成中文关键字
res = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[
{"role": "user", "content": chinese_keyword_prompt.format(chinese_abstract)},
],
temperature=0.5
)
chinese_keyword = res.choices[0].message.content
# 生成英文关键字
res = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[
{"role": "user", "content": english_keyword_prompt.format(chinese_keyword)},
],
temperature=0.5
)
english_keyword = res.choices[0].message.content
paper_abstract_keyword = {
"中文摘要": chinese_abstract,
"英文摘要": english_abstract,
"中文关键字": chinese_keyword,
"英文关键字": english_keyword
}
json_str = json.dumps(paper_abstract_keyword, indent=4, ensure_ascii=False)
abstract_keyword_path = os.path.join(uuid_path, "abstract_keyword.json")
with open(abstract_keyword_path, 'w') as json_file:
json_file.write(json_str)
lock.acquire()
api_key_list.append(api_key)
lock.release()
class GeneratePaper:
def __init__(self, mulu, table):
self.mulu = mulu
@ -107,8 +185,7 @@ class GeneratePaper:
def chat_content_(self,api_key, mulu_title_id, title, mulu, subtitle, prompt):
global lock
# time.sleep(5)
# api_key_list.append(api_key)
# self.paper[mulu_title_id] = subtitle
self.paper[mulu_title_id] = subtitle
if subtitle[:2] == "@@":
self.paper[mulu_title_id] = subtitle[2:]
else:
@ -148,7 +225,10 @@ def classify(): # 调用模型,设置最大batch_size
# 生成开题报告
# title, api_key, uuid_path
lock.acquire()
api_key = api_key_list.pop()
lock.release()
t = Thread(target=chat_kaitibaogao, args=(title,
api_key,
uuid_path,
@ -156,6 +236,17 @@ def classify(): # 调用模型,设置最大batch_size
t.start()
thread_list.append(t)
# 生成中英文摘要
lock.acquire()
api_key = api_key_list.pop()
lock.release()
t = Thread(target=chat_abstract_keyword, args=(title,
api_key,
uuid_path,
))
t.start()
thread_list.append(t)
# 生成目录
while True:
if api_key_list != []:
@ -198,15 +289,25 @@ def classify(): # 调用模型,设置最大batch_size
print(table_of_contents)
# table_of_contents = table_of_contents[:3] + table_of_contents[-1:]
# print(table_of_contents)
thanks_bool_table = table_of_contents[-3:]
if thanks not in thanks_bool_table:
table_of_contents.insert(-1, "致谢")
chat_class = GeneratePaper(mulu_list, table_of_contents)
print(len(table_of_contents))
############################################################
while True:
if api_key_list == []:
continue
if index == len(table_of_contents):
break
lock.acquire()
api_key = api_key_list.pop()
lock.release()
subtitle = table_of_contents[index]
if index == 0:
prompt = first_title_prompt
@ -215,18 +316,17 @@ def classify(): # 调用模型,设置最大batch_size
elif subtitle == "致谢":
prompt = thank_prompt
else:
prompt = first_title_prompt
prompt = small_title_prompt
print("请求的所有参数", api_key,
index,
title,
mulu_list,
subtitle,
prompt)
t = Thread(target=chat_class.chat_content_, args=(api_key,
index,
title,
mulu_list,
mulu,
subtitle,
prompt))
t.start()
@ -239,20 +339,57 @@ def classify(): # 调用模型,设置最大batch_size
thread.join()
print(chat_class.paper)
paper = "\n".join(chat_class.paper)
print(paper)
paper_content_list = chat_class.paper
# paper = "\n".join(chat_class.paper)
# print(paper)
# 不要txt,修改为json
#判断 摘要是否生成完成
abstract_keyword_path = os.path.join(uuid_path, "abstract_keyword.json")
while True:
print(abstract_keyword_path)
print(os.path.exists(abstract_keyword_path))
if os.path.exists(abstract_keyword_path) == True:
break
time.sleep(3)
with open(abstract_keyword_path, "r", encoding="utf-8") as f:
abstract_keyword_dict = json.load(f)
content_path = os.path.join(uuid_path, "content.txt")
with open(content_path, 'w', encoding='utf8') as f_content:
f_content.write(paper)
# 开题报告
kaitibaogao_path = os.path.join(uuid_path, "kaitibaogao.txt")
while True:
print(kaitibaogao_path)
print(os.path.exists(kaitibaogao_path))
if os.path.exists(kaitibaogao_path) == True:
break
time.sleep(3)
with open(kaitibaogao_path, "r", encoding="utf-8") as f:
kaitibaogao = f.read()
print("文件路径检测完成")
paper_dict = {}
for i in abstract_keyword_dict:
paper_dict[i] = abstract_keyword_dict[i]
paper_dict["正文"] = paper_content_list
paper_dict["目录"] = mulu
paper_dict["开题报告"] = kaitibaogao
json_str = json.dumps(paper_dict, indent=4, ensure_ascii=False)
paper_content_path = os.path.join(uuid_path, "paper_content.json")
with open(paper_content_path, 'w') as json_file:
json_file.write(json_str)
# content_path = os.path.join(uuid_path, "content.txt")
# with open(content_path, 'w', encoding='utf8') as f_content:
# f_content.write(paper_dict)
mulu_path = os.path.join(uuid_path, "mulu.txt")
with open(mulu_path, 'w', encoding='utf8') as f_mulu:
f_mulu.write(mulu)
kaitibaogao_txt_path = os.path.join(uuid_path, "kaitibaogao.txt")
# word保存路径
save_word_paper = os.path.join(uuid_path, "paper.docx")
@ -264,19 +401,19 @@ def classify(): # 调用模型,设置最大batch_size
# f2.write(content_base64)
# 拼接成word
title = pa.quote(title)
mulu_path = mulu_path
content_path = content_path
# 调用jar包
print("java_path", mulu_path, content_path, title, save_word_paper)
os.system(
"java -Dfile.encoding=UTF-8 -jar '/home/majiahui/ChatGPT_Sever/createAiXieZuoWord.jar' '{}' '{}' '{}' '{}'".format(
mulu_path, content_path, title, save_word_paper))
print("jaba_kaitibaogao", kaitibaogao_txt_path, save_word_paper_start)
os.system("java -Dfile.encoding=UTF-8 -jar '/home/majiahui/ChatGPT_Sever/createAiXieZuoKaitiWord.jar' '{}' '{}'".format(
kaitibaogao_txt_path, save_word_paper_start))
# title = pa.quote(title)
# mulu_path = mulu_path
# content_path = content_path
#
# # 调用jar包
# print("java_path", mulu_path, content_path, title, save_word_paper)
# os.system(
# "java -Dfile.encoding=UTF-8 -jar '/home/majiahui/ChatGPT_Sever/createAiXieZuoWord.jar' '{}' '{}' '{}' '{}'".format(
# mulu_path, content_path, title, save_word_paper))
#
# print("jaba_kaitibaogao", kaitibaogao_txt_path, save_word_paper_start)
# os.system("java -Dfile.encoding=UTF-8 -jar '/home/majiahui/ChatGPT_Sever/createAiXieZuoKaitiWord.jar' '{}' '{}'".format(
# kaitibaogao_txt_path, save_word_paper_start))
url_path_paper = "http://104.244.90.248:14000/download?filename_path={}/paper.docx".format(query_id)
url_path_kaiti = "http://104.244.90.248:14000/download?filename_path={}/paper_start.docx".format(query_id)
@ -382,4 +519,4 @@ t1 = Thread(target=classify)
t1.start()
if __name__ == "__main__":
app.run(host="0.0.0.0", port=14002, threaded=True, debug=False)
app.run(host="0.0.0.0", port=14000, threaded=True, debug=False)

45
serve_config_1.py

@ -0,0 +1,45 @@
class Config:
    """Runtime configuration for paper-generation server #1 (local Redis).

    Holds the regexes used to parse a generated table of contents, the
    ChatGPT prompt templates (filled in with ``str.format``), the OpenAI
    API-key pool, and Flask/Redis connection settings.
    """

    def __init__(self):
        # Regexes that locate numbered headings when splitting the generated
        # table of contents.  Raw strings so ``\s`` reaches ``re`` verbatim
        # (a non-raw '\s' triggers an invalid-escape warning).
        self.pantten_second_biaoti = r'[2二ⅡⅠ][、.]\s{0,}?[\u4e00-\u9fa5]+'
        self.pantten_other_biaoti = r'[2-9二三四五六七八九ⅡⅢⅣⅤⅥⅦⅧⅨ][、.]\s{0,}?[\u4e00-\u9fa5]+'

        # Prompt templates sent to the ChatGPT API; '{}' slots are filled
        # with title / table-of-contents / section-heading text.
        self.mulu_prompt = "请帮我根据题目为“{}”生成一个论文目录"
        self.first_title_prompt = "论文题目是“{}”,目录是“{}”,请把其中的大标题“{}”的内容补充完整,补充内容字数在100字左右"
        self.small_title_prompt = "论文题目是“{}”,目录是“{}”,请把其中的小标题“{}”的内容补充完整,补充内容字数在100字左右"
        # NOTE(review): "有有" below looks like a duplicated-character typo;
        # kept byte-identical because changing it alters the prompt sent to
        # the API — confirm and fix upstream.
        self.references_prompt = "论文题目是“{}”,目录是“{}”,请为这篇论文生成15篇左右的参考文献,要求其中有有中文参考文献不低于12篇,英文参考文献不低于2篇"
        self.thank_prompt = "请以“{}”为题写一篇论文的致谢"
        self.kaitibaogao_prompt = "请以《{}》为题目生成研究的主要的内容、背景、目的、意义,要求不少于100字"
        self.chinese_abstract_prompt = "请以《{}》为题目生成论文摘要,要求生成的字数在100字左右"
        self.english_abstract_prompt = "请把“{}”这段文字翻译成英文"
        self.chinese_keyword_prompt = "请为“{}”这段论文摘要生成3-5个关键字"
        self.english_keyword_prompt = "请把“{}”这几个关键字翻译成英文"

        # NOTE(review): these entries appear empty in the source — presumably
        # the Chinese ordinal characters (一..八) were lost in transfer;
        # confirm against the repository before relying on them.
        self.dabiaoti = ["", "", "", "", "", "", "", ""]

        # Directory where per-request intermediate text files are written.
        self.project_data_txt_path = "/home/majiahui/ChatGPT_Sever/new_data_txt"

        # SECURITY(review): hard-coded OpenAI API keys committed to the repo —
        # these should be moved to environment variables / secret storage and
        # the keys rotated.  Spare keys from the original file are kept below
        # as comments.
        self.openaikey_list = ["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",
                               "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",
                               "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"
                               ]
        # "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",
        # "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"
        # "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",
        # "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",
        # "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",
        # "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"

        # Section titles that mark the final stages of the generation flow.
        self.thanks = "致谢"
        self.references = "参考文献"

        # Flask listen port (kept as a string — callers format it into URLs).
        self.flask_port = "14000"

        # Redis connection settings.  The "reids" misspelling is kept because
        # callers reference these attribute names.
        self.reids_ip = 'localhost'
        self.reids_port = 63179
        self.reids_db = 2
        self.reids_password = 'Zhicheng123*'

43
serve_config_2.py

@ -0,0 +1,43 @@
class Config:
    """Runtime configuration for paper-generation server #2 (remote Redis).

    Identical in shape to serve_config_1.Config: regexes for parsing a
    generated table of contents, ChatGPT prompt templates (``str.format``
    slots), an OpenAI API-key pool, and Flask/Redis connection settings.
    Differs only in the key pool and the Redis host.
    """

    def __init__(self):
        # Regexes that locate numbered headings when splitting the generated
        # table of contents.  Raw strings so ``\s`` reaches ``re`` verbatim
        # (a non-raw '\s' triggers an invalid-escape warning).
        self.pantten_second_biaoti = r'[2二ⅡⅠ][、.]\s{0,}?[\u4e00-\u9fa5]+'
        self.pantten_other_biaoti = r'[2-9二三四五六七八九ⅡⅢⅣⅤⅥⅦⅧⅨ][、.]\s{0,}?[\u4e00-\u9fa5]+'

        # Prompt templates sent to the ChatGPT API; '{}' slots are filled
        # with title / table-of-contents / section-heading text.
        self.mulu_prompt = "请帮我根据题目为“{}”生成一个论文目录"
        self.first_title_prompt = "论文题目是“{}”,目录是“{}”,请把其中的大标题“{}”的内容补充完整,补充内容字数在100字左右"
        self.small_title_prompt = "论文题目是“{}”,目录是“{}”,请把其中的小标题“{}”的内容补充完整,补充内容字数在100字左右"
        # NOTE(review): "有有" below looks like a duplicated-character typo;
        # kept byte-identical because changing it alters the prompt sent to
        # the API — confirm and fix upstream.
        self.references_prompt = "论文题目是“{}”,目录是“{}”,请为这篇论文生成15篇左右的参考文献,要求其中有有中文参考文献不低于12篇,英文参考文献不低于2篇"
        self.thank_prompt = "请以“{}”为题写一篇论文的致谢"
        self.kaitibaogao_prompt = "请以《{}》为题目生成研究的主要的内容、背景、目的、意义,要求不少于100字"
        self.chinese_abstract_prompt = "请以《{}》为题目生成论文摘要,要求生成的字数在100字左右"
        self.english_abstract_prompt = "请把“{}”这段文字翻译成英文"
        self.chinese_keyword_prompt = "请为“{}”这段论文摘要生成3-5个关键字"
        self.english_keyword_prompt = "请把“{}”这几个关键字翻译成英文"

        # NOTE(review): these entries appear empty in the source — presumably
        # the Chinese ordinal characters (一..八) were lost in transfer;
        # confirm against the repository before relying on them.
        self.dabiaoti = ["", "", "", "", "", "", "", ""]

        # Directory where per-request intermediate text files are written.
        self.project_data_txt_path = "/home/majiahui/ChatGPT_Sever/new_data_txt"

        # SECURITY(review): hard-coded OpenAI API keys committed to the repo —
        # these should be moved to environment variables / secret storage and
        # the keys rotated.  Spare keys from the original file are kept below
        # as comments.
        self.openaikey_list = [
            "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",
            "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",
            "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"
        ]
        # "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",
        # "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",
        # "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"

        # Section titles that mark the final stages of the generation flow.
        self.thanks = "致谢"
        self.references = "参考文献"

        # Flask listen port (kept as a string — callers format it into URLs).
        self.flask_port = "14000"

        # Redis connection settings.  The "reids" misspelling is kept because
        # callers reference these attribute names.
        self.reids_ip = '104.244.89.190'
        self.reids_port = 63179
        self.reids_db = 2
        self.reids_password = 'Zhicheng123*'

20
查询uuid.py

@ -1,13 +1,27 @@
import requests
import time
data = {"id": "58abde1c-d1ef-11ed-a2cd-aaaa001aad2e"}
data = {"id": "a259c76a-d521-11ed-b23c-aaaa001b4bbf"}
start = time.time()
res = requests.post('http://104.244.90.248:14000/search', json=data)
res = requests.post('http://104.244.89.190:14002/search', json=data)
end = time.time()
print(end - start)
print(res.text)
print(res.text)
'''
da823db0-d50e-11ed-a38c-aaaa001b4bbf
e8bb13f2-d50e-11ed-b37a-aaaa001b4bbf
f3e2c216-d50e-11ed-9869-aaaa001b4bbf
268f0b52-d50f-11ed-b741-aaaa001b4bbf
'''
'''a9880f98-d516-11ed-b1f9-aaaa001b4bbf
b689132c-d516-11ed-b4e7-aaaa001b4bbf
c3f4bea8-d516-11ed-beda-aaaa001b4bbf
cd948cb8-d516-11ed-9847-aaaa001b4bbf
f2c5920c-d516-11ed-96b1-aaaa001b4bbf'''

22
测试chatgpt调用接口.py

@ -7,11 +7,13 @@
@Software:
@Describe:
"""
import time
import openai
import flask
def chat_drop():
openai.api_key = "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"
def chat_drop(api_key):
openai.api_key = api_key
res = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[
@ -23,5 +25,19 @@ def chat_drop():
top_p=1,
)
print(res.choices[0].message.content)
time.sleep(3)
openaikey_list = ["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",
"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",
"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",
"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",
"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",
"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",
"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",
"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",
"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"]
for i in openaikey_list:
chat_drop(i)
chat_drop()
chat_drop("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")

13
测试flask多进程.py

@ -0,0 +1,13 @@
# Minimal Flask app used to test multi-process / multi-request serving.
# Fix: the original used `request` without importing it, which raises
# NameError on the first request that hits "/".
from flask import Flask, request

app = Flask(__name__)


@app.route("/")
def index():
    """Read the optional 'filename_path' query parameter and return a greeting.

    The parameter is read (defaulting to "") to exercise request parsing,
    but is not otherwise used by this smoke-test endpoint.
    """
    filename_path = request.args.get('filename_path', '')
    return "Hello world!"


if __name__ == "__main__":
    # debug=True is fine for this local test script only — never in production.
    app.run('0.0.0.0', port=11000, debug=True)

3
测试多进程.py

@ -1,6 +1,6 @@
import threading
num = 0
lock = threading.Lock()
@ -18,6 +18,7 @@ def sub():
lock.release()
for i in range(1000):
num = 0
t1 = threading.Thread(target=add, )
t2 = threading.Thread(target=sub, )
t1.start()

2
测试生成uuid.py

@ -5,7 +5,7 @@ data = {"title": "大型商业建筑人员疏散设计研究"}
start = time.time()
res = requests.post('http://104.244.90.248:14000/chat', json=data)
res = requests.post('http://104.244.89.190:14002/chat', json=data)
end = time.time()
print(end - start)

2
简单的flask.py

@ -3,7 +3,7 @@ from flask import Flask
app = Flask(__name__)
@app.route("/aa")
@app.route("/")
def index():
return "Hello world!"

Loading…
Cancel
Save