diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index fa73296..0ef3ae5 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -1,38 +1,38 @@
----
-name: Bug report
-about: Create a bug report to help me solve the issue.
-title: "[BUG] "
-labels: bug, enhancement
-assignees: Official-Husko
-
----
-
-**Describe the bug**
-A clear and concise description of what the bug is.
-
-**To Reproduce**
-Steps to reproduce the behavior:
-1. Go to '...'
-2. Click on '....'
-3. Scroll down to '....'
-4. See error
-
-**Expected behavior**
-A clear and concise description of what you expected to happen.
-
-**Screenshots**
-If applicable, add screenshots to help explain your problem.
-
-**Desktop (please complete the following information):**
- - OS: [e.g. Windows]
- - Version [e.g. 10]
-
-**Used Parameters and Settings (please complete the following information):**
-- Site: [e.g. e621]
-- Tool Version: [e.g. ]
-- Tags/Link: [e.g. wolf female sfw]
-
-**Additional context**
-Add any other context about the problem here.
-
-### ***Please attach the runtime.log***
+---
+name: Bug report
+about: Create a bug report to help me solve the issue.
+title: "[BUG] "
+labels: bug, enhancement
+assignees: Official-Husko
+
+---
+
+**Describe the bug**
+A clear and concise description of what the bug is.
+
+**To Reproduce**
+Steps to reproduce the behavior:
+1. Go to '...'
+2. Click on '....'
+3. Scroll down to '....'
+4. See error
+
+**Expected behavior**
+A clear and concise description of what you expected to happen.
+
+**Screenshots**
+If applicable, add screenshots to help explain your problem.
+
+**Desktop (please complete the following information):**
+ - OS: [e.g. Windows]
+ - Version: [e.g. 10]
+
+**Used Parameters and Settings (please complete the following information):**
+- Site: [e.g. e621]
+- Tool Version: [e.g. ]
+- Tags/Link: [e.g. wolf female sfw]
+
+**Additional context**
+Add any other context about the problem here.
+
+### ***Please attach the runtime.log***
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
index 7378322..c81100f 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -1,26 +1,26 @@
----
-name: Feature request
-about: Suggest an idea for this project
-title: "[REQUEST] "
-labels: enhancement
-assignees: Official-Husko
-
----
-
-**Is your feature request related to a problem? Please describe.**
-A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
-
-**Describe the solution you'd like**
-A clear and concise description of what you want to happen.
-
-**Describe alternatives you've considered**
-A clear and concise description of any alternative solutions or features you've considered.
-
-**Additional context**
-Add any other context or screenshots about the feature request here.
-
-**Site URL**
-Add the site URL here if this is a site request.
-
-**Does the site have an API?**
-If the site has an API please put the link here. If unknown write it.
+---
+name: Feature request
+about: Suggest an idea for this project
+title: "[REQUEST] "
+labels: enhancement
+assignees: Official-Husko
+
+---
+
+**Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+**Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+
+**Describe alternatives you've considered**
+A clear and concise description of any alternative solutions or features you've considered.
+
+**Additional context**
+Add any other context or screenshots about the feature request here.
+
+**Site URL**
+Add the site URL here if this is a site request.
+
+**Does the site have an API?**
+If the site has an API, please put the link here. If unknown, say so.
diff --git a/.github/workflows/build_windows.yml b/.github/workflows/build_windows.yml
new file mode 100644
index 0000000..e264154
--- /dev/null
+++ b/.github/workflows/build_windows.yml
@@ -0,0 +1,47 @@
+name: Build Windows Executable
+
+on:
+ push:
+ branches:
+ - dev # Adjust branch name as needed
+
+jobs:
+ build:
+ runs-on: windows-latest
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: 3.12.4 # Specify the Python version you need
+
+ - name: Create virtual environment and build
+ shell: bash
+ run: |
+ python -m venv .env
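+        # On Windows, venv places its activation script under Scripts/ rather than bin/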
+ source .env/Scripts/activate
+ python -m pip install --upgrade pip
+ pip install pyinstaller
+        pip install -r requirements.txt # Replace with your requirements file if it exists
+
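+        # --hidden-import forces PyInstaller to bundle modules its static
+        # analysis can miss; --add-data copies grapheme's data files into the
+        # bundle (Windows uses ';' as the source;destination separator).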
+ pyinstaller --paths .env/Lib/site-packages \
+ --hidden-import requests \
+ --hidden-import inquirer \
+ --hidden-import alive_progress \
+ --hidden-import termcolor \
+ --hidden-import xmltodict \
+ --add-data=".env/Lib/site-packages/grapheme/data/*;grapheme/data/" \
+ --onefile \
+ --icon "icon.ico" \
+ --console \
+ --name "NN-Downloader" \
+ main.py
+
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: nn-downloader-windows
+ path: dist/NN-Downloader.exe # Adjust the path to your executable relative to the root of the repository
diff --git a/.gitignore b/.gitignore
index d387072..f174f52 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,14 +1,7 @@
-config.json
-__pycache__/
-*.spec
-dist/
-build/
-media/
-.env/
-testing_accounts.txt
-old_config.json
-db/
-outdated
-runtime.log
-config.json.dev
-Build Release.bat
+/.env/
+/media/
+/db/
+config.json
+config.json.dev
+old_config.json
+testing_accounts.txt
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 0c2aacc..e9b4361 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -1,16 +1,16 @@
-{
- // Use IntelliSense to learn about possible attributes.
- // Hover to view descriptions of existing attributes.
- // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
- "version": "0.2.0",
- "configurations": [
- {
- "name": "Python: Current File",
- "type": "python",
- "request": "launch",
- "program": "${workspaceFolder}\\main.py",
- "console": "integratedTerminal",
- "justMyCode": true
- }
- ]
+{
+ // Use IntelliSense to learn about possible attributes.
+ // Hover to view descriptions of existing attributes.
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+ "version": "0.2.0",
+ "configurations": [
+ {
+ "name": "Python: Current File",
+ "type": "python",
+ "request": "launch",
+ "program": "${workspaceFolder}/main.py",
+ "console": "integratedTerminal",
+ "justMyCode": true
+ }
+ ]
}
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
index a612ad9..76a17d7 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,373 +1,373 @@
-Mozilla Public License Version 2.0
-==================================
-
-1. Definitions
---------------
-
-1.1. "Contributor"
- means each individual or legal entity that creates, contributes to
- the creation of, or owns Covered Software.
-
-1.2. "Contributor Version"
- means the combination of the Contributions of others (if any) used
- by a Contributor and that particular Contributor's Contribution.
-
-1.3. "Contribution"
- means Covered Software of a particular Contributor.
-
-1.4. "Covered Software"
- means Source Code Form to which the initial Contributor has attached
- the notice in Exhibit A, the Executable Form of such Source Code
- Form, and Modifications of such Source Code Form, in each case
- including portions thereof.
-
-1.5. "Incompatible With Secondary Licenses"
- means
-
- (a) that the initial Contributor has attached the notice described
- in Exhibit B to the Covered Software; or
-
- (b) that the Covered Software was made available under the terms of
- version 1.1 or earlier of the License, but not also under the
- terms of a Secondary License.
-
-1.6. "Executable Form"
- means any form of the work other than Source Code Form.
-
-1.7. "Larger Work"
- means a work that combines Covered Software with other material, in
- a separate file or files, that is not Covered Software.
-
-1.8. "License"
- means this document.
-
-1.9. "Licensable"
- means having the right to grant, to the maximum extent possible,
- whether at the time of the initial grant or subsequently, any and
- all of the rights conveyed by this License.
-
-1.10. "Modifications"
- means any of the following:
-
- (a) any file in Source Code Form that results from an addition to,
- deletion from, or modification of the contents of Covered
- Software; or
-
- (b) any new file in Source Code Form that contains any Covered
- Software.
-
-1.11. "Patent Claims" of a Contributor
- means any patent claim(s), including without limitation, method,
- process, and apparatus claims, in any patent Licensable by such
- Contributor that would be infringed, but for the grant of the
- License, by the making, using, selling, offering for sale, having
- made, import, or transfer of either its Contributions or its
- Contributor Version.
-
-1.12. "Secondary License"
- means either the GNU General Public License, Version 2.0, the GNU
- Lesser General Public License, Version 2.1, the GNU Affero General
- Public License, Version 3.0, or any later versions of those
- licenses.
-
-1.13. "Source Code Form"
- means the form of the work preferred for making modifications.
-
-1.14. "You" (or "Your")
- means an individual or a legal entity exercising rights under this
- License. For legal entities, "You" includes any entity that
- controls, is controlled by, or is under common control with You. For
- purposes of this definition, "control" means (a) the power, direct
- or indirect, to cause the direction or management of such entity,
- whether by contract or otherwise, or (b) ownership of more than
- fifty percent (50%) of the outstanding shares or beneficial
- ownership of such entity.
-
-2. License Grants and Conditions
---------------------------------
-
-2.1. Grants
-
-Each Contributor hereby grants You a world-wide, royalty-free,
-non-exclusive license:
-
-(a) under intellectual property rights (other than patent or trademark)
- Licensable by such Contributor to use, reproduce, make available,
- modify, display, perform, distribute, and otherwise exploit its
- Contributions, either on an unmodified basis, with Modifications, or
- as part of a Larger Work; and
-
-(b) under Patent Claims of such Contributor to make, use, sell, offer
- for sale, have made, import, and otherwise transfer either its
- Contributions or its Contributor Version.
-
-2.2. Effective Date
-
-The licenses granted in Section 2.1 with respect to any Contribution
-become effective for each Contribution on the date the Contributor first
-distributes such Contribution.
-
-2.3. Limitations on Grant Scope
-
-The licenses granted in this Section 2 are the only rights granted under
-this License. No additional rights or licenses will be implied from the
-distribution or licensing of Covered Software under this License.
-Notwithstanding Section 2.1(b) above, no patent license is granted by a
-Contributor:
-
-(a) for any code that a Contributor has removed from Covered Software;
- or
-
-(b) for infringements caused by: (i) Your and any other third party's
- modifications of Covered Software, or (ii) the combination of its
- Contributions with other software (except as part of its Contributor
- Version); or
-
-(c) under Patent Claims infringed by Covered Software in the absence of
- its Contributions.
-
-This License does not grant any rights in the trademarks, service marks,
-or logos of any Contributor (except as may be necessary to comply with
-the notice requirements in Section 3.4).
-
-2.4. Subsequent Licenses
-
-No Contributor makes additional grants as a result of Your choice to
-distribute the Covered Software under a subsequent version of this
-License (see Section 10.2) or under the terms of a Secondary License (if
-permitted under the terms of Section 3.3).
-
-2.5. Representation
-
-Each Contributor represents that the Contributor believes its
-Contributions are its original creation(s) or it has sufficient rights
-to grant the rights to its Contributions conveyed by this License.
-
-2.6. Fair Use
-
-This License is not intended to limit any rights You have under
-applicable copyright doctrines of fair use, fair dealing, or other
-equivalents.
-
-2.7. Conditions
-
-Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
-in Section 2.1.
-
-3. Responsibilities
--------------------
-
-3.1. Distribution of Source Form
-
-All distribution of Covered Software in Source Code Form, including any
-Modifications that You create or to which You contribute, must be under
-the terms of this License. You must inform recipients that the Source
-Code Form of the Covered Software is governed by the terms of this
-License, and how they can obtain a copy of this License. You may not
-attempt to alter or restrict the recipients' rights in the Source Code
-Form.
-
-3.2. Distribution of Executable Form
-
-If You distribute Covered Software in Executable Form then:
-
-(a) such Covered Software must also be made available in Source Code
- Form, as described in Section 3.1, and You must inform recipients of
- the Executable Form how they can obtain a copy of such Source Code
- Form by reasonable means in a timely manner, at a charge no more
- than the cost of distribution to the recipient; and
-
-(b) You may distribute such Executable Form under the terms of this
- License, or sublicense it under different terms, provided that the
- license for the Executable Form does not attempt to limit or alter
- the recipients' rights in the Source Code Form under this License.
-
-3.3. Distribution of a Larger Work
-
-You may create and distribute a Larger Work under terms of Your choice,
-provided that You also comply with the requirements of this License for
-the Covered Software. If the Larger Work is a combination of Covered
-Software with a work governed by one or more Secondary Licenses, and the
-Covered Software is not Incompatible With Secondary Licenses, this
-License permits You to additionally distribute such Covered Software
-under the terms of such Secondary License(s), so that the recipient of
-the Larger Work may, at their option, further distribute the Covered
-Software under the terms of either this License or such Secondary
-License(s).
-
-3.4. Notices
-
-You may not remove or alter the substance of any license notices
-(including copyright notices, patent notices, disclaimers of warranty,
-or limitations of liability) contained within the Source Code Form of
-the Covered Software, except that You may alter any license notices to
-the extent required to remedy known factual inaccuracies.
-
-3.5. Application of Additional Terms
-
-You may choose to offer, and to charge a fee for, warranty, support,
-indemnity or liability obligations to one or more recipients of Covered
-Software. However, You may do so only on Your own behalf, and not on
-behalf of any Contributor. You must make it absolutely clear that any
-such warranty, support, indemnity, or liability obligation is offered by
-You alone, and You hereby agree to indemnify every Contributor for any
-liability incurred by such Contributor as a result of warranty, support,
-indemnity or liability terms You offer. You may include additional
-disclaimers of warranty and limitations of liability specific to any
-jurisdiction.
-
-4. Inability to Comply Due to Statute or Regulation
----------------------------------------------------
-
-If it is impossible for You to comply with any of the terms of this
-License with respect to some or all of the Covered Software due to
-statute, judicial order, or regulation then You must: (a) comply with
-the terms of this License to the maximum extent possible; and (b)
-describe the limitations and the code they affect. Such description must
-be placed in a text file included with all distributions of the Covered
-Software under this License. Except to the extent prohibited by statute
-or regulation, such description must be sufficiently detailed for a
-recipient of ordinary skill to be able to understand it.
-
-5. Termination
---------------
-
-5.1. The rights granted under this License will terminate automatically
-if You fail to comply with any of its terms. However, if You become
-compliant, then the rights granted under this License from a particular
-Contributor are reinstated (a) provisionally, unless and until such
-Contributor explicitly and finally terminates Your grants, and (b) on an
-ongoing basis, if such Contributor fails to notify You of the
-non-compliance by some reasonable means prior to 60 days after You have
-come back into compliance. Moreover, Your grants from a particular
-Contributor are reinstated on an ongoing basis if such Contributor
-notifies You of the non-compliance by some reasonable means, this is the
-first time You have received notice of non-compliance with this License
-from such Contributor, and You become compliant prior to 30 days after
-Your receipt of the notice.
-
-5.2. If You initiate litigation against any entity by asserting a patent
-infringement claim (excluding declaratory judgment actions,
-counter-claims, and cross-claims) alleging that a Contributor Version
-directly or indirectly infringes any patent, then the rights granted to
-You by any and all Contributors for the Covered Software under Section
-2.1 of this License shall terminate.
-
-5.3. In the event of termination under Sections 5.1 or 5.2 above, all
-end user license agreements (excluding distributors and resellers) which
-have been validly granted by You or Your distributors under this License
-prior to termination shall survive termination.
-
-************************************************************************
-* *
-* 6. Disclaimer of Warranty *
-* ------------------------- *
-* *
-* Covered Software is provided under this License on an "as is" *
-* basis, without warranty of any kind, either expressed, implied, or *
-* statutory, including, without limitation, warranties that the *
-* Covered Software is free of defects, merchantable, fit for a *
-* particular purpose or non-infringing. The entire risk as to the *
-* quality and performance of the Covered Software is with You. *
-* Should any Covered Software prove defective in any respect, You *
-* (not any Contributor) assume the cost of any necessary servicing, *
-* repair, or correction. This disclaimer of warranty constitutes an *
-* essential part of this License. No use of any Covered Software is *
-* authorized under this License except under this disclaimer. *
-* *
-************************************************************************
-
-************************************************************************
-* *
-* 7. Limitation of Liability *
-* -------------------------- *
-* *
-* Under no circumstances and under no legal theory, whether tort *
-* (including negligence), contract, or otherwise, shall any *
-* Contributor, or anyone who distributes Covered Software as *
-* permitted above, be liable to You for any direct, indirect, *
-* special, incidental, or consequential damages of any character *
-* including, without limitation, damages for lost profits, loss of *
-* goodwill, work stoppage, computer failure or malfunction, or any *
-* and all other commercial damages or losses, even if such party *
-* shall have been informed of the possibility of such damages. This *
-* limitation of liability shall not apply to liability for death or *
-* personal injury resulting from such party's negligence to the *
-* extent applicable law prohibits such limitation. Some *
-* jurisdictions do not allow the exclusion or limitation of *
-* incidental or consequential damages, so this exclusion and *
-* limitation may not apply to You. *
-* *
-************************************************************************
-
-8. Litigation
--------------
-
-Any litigation relating to this License may be brought only in the
-courts of a jurisdiction where the defendant maintains its principal
-place of business and such litigation shall be governed by laws of that
-jurisdiction, without reference to its conflict-of-law provisions.
-Nothing in this Section shall prevent a party's ability to bring
-cross-claims or counter-claims.
-
-9. Miscellaneous
-----------------
-
-This License represents the complete agreement concerning the subject
-matter hereof. If any provision of this License is held to be
-unenforceable, such provision shall be reformed only to the extent
-necessary to make it enforceable. Any law or regulation which provides
-that the language of a contract shall be construed against the drafter
-shall not be used to construe this License against a Contributor.
-
-10. Versions of the License
----------------------------
-
-10.1. New Versions
-
-Mozilla Foundation is the license steward. Except as provided in Section
-10.3, no one other than the license steward has the right to modify or
-publish new versions of this License. Each version will be given a
-distinguishing version number.
-
-10.2. Effect of New Versions
-
-You may distribute the Covered Software under the terms of the version
-of the License under which You originally received the Covered Software,
-or under the terms of any subsequent version published by the license
-steward.
-
-10.3. Modified Versions
-
-If you create software not governed by this License, and you want to
-create a new license for such software, you may create and use a
-modified version of this License if you rename the license and remove
-any references to the name of the license steward (except to note that
-such modified license differs from this License).
-
-10.4. Distributing Source Code Form that is Incompatible With Secondary
-Licenses
-
-If You choose to distribute Source Code Form that is Incompatible With
-Secondary Licenses under the terms of this version of the License, the
-notice described in Exhibit B of this License must be attached.
-
-Exhibit A - Source Code Form License Notice
--------------------------------------------
-
- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this
- file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-If it is not possible or desirable to put the notice in a particular
-file, then You may include the notice in a location (such as a LICENSE
-file in a relevant directory) where a recipient would be likely to look
-for such a notice.
-
-You may add additional accurate notices of copyright ownership.
-
-Exhibit B - "Incompatible With Secondary Licenses" Notice
----------------------------------------------------------
-
- This Source Code Form is "Incompatible With Secondary Licenses", as
- defined by the Mozilla Public License, v. 2.0.
+Mozilla Public License Version 2.0
+==================================
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+ means each individual or legal entity that creates, contributes to
+ the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+ means the combination of the Contributions of others (if any) used
+ by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+ means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+ means Source Code Form to which the initial Contributor has attached
+ the notice in Exhibit A, the Executable Form of such Source Code
+ Form, and Modifications of such Source Code Form, in each case
+ including portions thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+ means
+
+ (a) that the initial Contributor has attached the notice described
+ in Exhibit B to the Covered Software; or
+
+ (b) that the Covered Software was made available under the terms of
+ version 1.1 or earlier of the License, but not also under the
+ terms of a Secondary License.
+
+1.6. "Executable Form"
+ means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+ means a work that combines Covered Software with other material, in
+ a separate file or files, that is not Covered Software.
+
+1.8. "License"
+ means this document.
+
+1.9. "Licensable"
+ means having the right to grant, to the maximum extent possible,
+ whether at the time of the initial grant or subsequently, any and
+ all of the rights conveyed by this License.
+
+1.10. "Modifications"
+ means any of the following:
+
+ (a) any file in Source Code Form that results from an addition to,
+ deletion from, or modification of the contents of Covered
+ Software; or
+
+ (b) any new file in Source Code Form that contains any Covered
+ Software.
+
+1.11. "Patent Claims" of a Contributor
+ means any patent claim(s), including without limitation, method,
+ process, and apparatus claims, in any patent Licensable by such
+ Contributor that would be infringed, but for the grant of the
+ License, by the making, using, selling, offering for sale, having
+ made, import, or transfer of either its Contributions or its
+ Contributor Version.
+
+1.12. "Secondary License"
+ means either the GNU General Public License, Version 2.0, the GNU
+ Lesser General Public License, Version 2.1, the GNU Affero General
+ Public License, Version 3.0, or any later versions of those
+ licenses.
+
+1.13. "Source Code Form"
+ means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, "You" includes any entity that
+ controls, is controlled by, or is under common control with You. For
+ purposes of this definition, "control" means (a) the power, direct
+ or indirect, to cause the direction or management of such entity,
+ whether by contract or otherwise, or (b) ownership of more than
+ fifty percent (50%) of the outstanding shares or beneficial
+ ownership of such entity.
+
+2. License Grants and Conditions
+--------------------------------
+
+2.1. Grants
+
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or
+ as part of a Larger Work; and
+
+(b) under Patent Claims of such Contributor to make, use, sell, offer
+ for sale, have made, import, and otherwise transfer either its
+ Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+The licenses granted in Section 2.1 with respect to any Contribution
+become effective for each Contribution on the date the Contributor first
+distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+The licenses granted in this Section 2 are the only rights granted under
+this License. No additional rights or licenses will be implied from the
+distribution or licensing of Covered Software under this License.
+Notwithstanding Section 2.1(b) above, no patent license is granted by a
+Contributor:
+
+(a) for any code that a Contributor has removed from Covered Software;
+ or
+
+(b) for infringements caused by: (i) Your and any other third party's
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+(c) under Patent Claims infringed by Covered Software in the absence of
+ its Contributions.
+
+This License does not grant any rights in the trademarks, service marks,
+or logos of any Contributor (except as may be necessary to comply with
+the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+No Contributor makes additional grants as a result of Your choice to
+distribute the Covered Software under a subsequent version of this
+License (see Section 10.2) or under the terms of a Secondary License (if
+permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+Each Contributor represents that the Contributor believes its
+Contributions are its original creation(s) or it has sufficient rights
+to grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+This License is not intended to limit any rights You have under
+applicable copyright doctrines of fair use, fair dealing, or other
+equivalents.
+
+2.7. Conditions
+
+Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+in Section 2.1.
+
+3. Responsibilities
+-------------------
+
+3.1. Distribution of Source Form
+
+All distribution of Covered Software in Source Code Form, including any
+Modifications that You create or to which You contribute, must be under
+the terms of this License. You must inform recipients that the Source
+Code Form of the Covered Software is governed by the terms of this
+License, and how they can obtain a copy of this License. You may not
+attempt to alter or restrict the recipients' rights in the Source Code
+Form.
+
+3.2. Distribution of Executable Form
+
+If You distribute Covered Software in Executable Form then:
+
+(a) such Covered Software must also be made available in Source Code
+ Form, as described in Section 3.1, and You must inform recipients of
+ the Executable Form how they can obtain a copy of such Source Code
+ Form by reasonable means in a timely manner, at a charge no more
+ than the cost of distribution to the recipient; and
+
+(b) You may distribute such Executable Form under the terms of this
+ License, or sublicense it under different terms, provided that the
+ license for the Executable Form does not attempt to limit or alter
+ the recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+You may create and distribute a Larger Work under terms of Your choice,
+provided that You also comply with the requirements of this License for
+the Covered Software. If the Larger Work is a combination of Covered
+Software with a work governed by one or more Secondary Licenses, and the
+Covered Software is not Incompatible With Secondary Licenses, this
+License permits You to additionally distribute such Covered Software
+under the terms of such Secondary License(s), so that the recipient of
+the Larger Work may, at their option, further distribute the Covered
+Software under the terms of either this License or such Secondary
+License(s).
+
+3.4. Notices
+
+You may not remove or alter the substance of any license notices
+(including copyright notices, patent notices, disclaimers of warranty,
+or limitations of liability) contained within the Source Code Form of
+the Covered Software, except that You may alter any license notices to
+the extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+You may choose to offer, and to charge a fee for, warranty, support,
+indemnity or liability obligations to one or more recipients of Covered
+Software. However, You may do so only on Your own behalf, and not on
+behalf of any Contributor. You must make it absolutely clear that any
+such warranty, support, indemnity, or liability obligation is offered by
+You alone, and You hereby agree to indemnify every Contributor for any
+liability incurred by such Contributor as a result of warranty, support,
+indemnity or liability terms You offer. You may include additional
+disclaimers of warranty and limitations of liability specific to any
+jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+---------------------------------------------------
+
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Software due to
+statute, judicial order, or regulation then You must: (a) comply with
+the terms of this License to the maximum extent possible; and (b)
+describe the limitations and the code they affect. Such description must
+be placed in a text file included with all distributions of the Covered
+Software under this License. Except to the extent prohibited by statute
+or regulation, such description must be sufficiently detailed for a
+recipient of ordinary skill to be able to understand it.
+
+5. Termination
+--------------
+
+5.1. The rights granted under this License will terminate automatically
+if You fail to comply with any of its terms. However, if You become
+compliant, then the rights granted under this License from a particular
+Contributor are reinstated (a) provisionally, unless and until such
+Contributor explicitly and finally terminates Your grants, and (b) on an
+ongoing basis, if such Contributor fails to notify You of the
+non-compliance by some reasonable means prior to 60 days after You have
+come back into compliance. Moreover, Your grants from a particular
+Contributor are reinstated on an ongoing basis if such Contributor
+notifies You of the non-compliance by some reasonable means, this is the
+first time You have received notice of non-compliance with this License
+from such Contributor, and You become compliant prior to 30 days after
+Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+infringement claim (excluding declaratory judgment actions,
+counter-claims, and cross-claims) alleging that a Contributor Version
+directly or indirectly infringes any patent, then the rights granted to
+You by any and all Contributors for the Covered Software under Section
+2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+end user license agreements (excluding distributors and resellers) which
+have been validly granted by You or Your distributors under this License
+prior to termination shall survive termination.
+
+************************************************************************
+* *
+* 6. Disclaimer of Warranty *
+* ------------------------- *
+* *
+* Covered Software is provided under this License on an "as is" *
+* basis, without warranty of any kind, either expressed, implied, or *
+* statutory, including, without limitation, warranties that the *
+* Covered Software is free of defects, merchantable, fit for a *
+* particular purpose or non-infringing. The entire risk as to the *
+* quality and performance of the Covered Software is with You. *
+* Should any Covered Software prove defective in any respect, You *
+* (not any Contributor) assume the cost of any necessary servicing, *
+* repair, or correction. This disclaimer of warranty constitutes an *
+* essential part of this License. No use of any Covered Software is *
+* authorized under this License except under this disclaimer. *
+* *
+************************************************************************
+
+************************************************************************
+* *
+* 7. Limitation of Liability *
+* -------------------------- *
+* *
+* Under no circumstances and under no legal theory, whether tort *
+* (including negligence), contract, or otherwise, shall any *
+* Contributor, or anyone who distributes Covered Software as *
+* permitted above, be liable to You for any direct, indirect, *
+* special, incidental, or consequential damages of any character *
+* including, without limitation, damages for lost profits, loss of *
+* goodwill, work stoppage, computer failure or malfunction, or any *
+* and all other commercial damages or losses, even if such party *
+* shall have been informed of the possibility of such damages. This *
+* limitation of liability shall not apply to liability for death or *
+* personal injury resulting from such party's negligence to the *
+* extent applicable law prohibits such limitation. Some *
+* jurisdictions do not allow the exclusion or limitation of *
+* incidental or consequential damages, so this exclusion and *
+* limitation may not apply to You. *
+* *
+************************************************************************
+
+8. Litigation
+-------------
+
+Any litigation relating to this License may be brought only in the
+courts of a jurisdiction where the defendant maintains its principal
+place of business and such litigation shall be governed by laws of that
+jurisdiction, without reference to its conflict-of-law provisions.
+Nothing in this Section shall prevent a party's ability to bring
+cross-claims or counter-claims.
+
+9. Miscellaneous
+----------------
+
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision of this License is held to be
+unenforceable, such provision shall be reformed only to the extent
+necessary to make it enforceable. Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+ This Source Code Form is subject to the terms of the Mozilla Public
+ License, v. 2.0. If a copy of the MPL was not distributed with this
+ file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+ This Source Code Form is "Incompatible With Secondary Licenses", as
+ defined by the Mozilla Public License, v. 2.0.
diff --git a/README.md b/README.md
index 74a74a6..8c4af03 100644
--- a/README.md
+++ b/README.md
@@ -1,66 +1,71 @@
-# NN-Downloader
-
-Welcome to the successor of the [multporn image downloader v1][2] & [v2][1] and most downloaders out there regarding "NSFW" material. The NN-Downloader or Naughty-Naughty-Downloader (yes very creative I know) supports multiple sites with their official API (if available), and proxies and it's also portable.
-
-### Preview
-![preview](https://github.com/Official-Husko/NN-Downloader/blob/dev/preview/preview.gif)
-
-[Windows Download][14] | [Linux Download][21] (Thanks to [incognibro538](https://github.com/incognibro538))
-
-
-
-#### Features:
-- Portable
-- Proxy Support
-- AI Training Compatible
-- Avoid Duplicates
-
-
-
-#### Currently Supported:
-- [Rule34][3] (API)
-- [E621][4] (API)
-- [E6AI][22] (API)
-- [E926][5] (API)
-- [Furbooru][6] (API)
-- [Multporn][7]
-- [Yiffer][8]
-- [Luscious][16]
-
-
-
-[1]:https://github.com/Official-Husko/multporn-image-downloader-v2
-[2]:https://github.com/Official-Husko/multporn-image-downloader
-[3]:https://rule34.xxx
-[4]:https://e621.net/
-[5]:https://e926.net/
-[6]:https://furbooru.org/
-[7]:https://multporn.net/
-[8]:https://yiffer.xyz/
-[9]:https://theyiffgallery.com/
-[10]:https://furry.booru.org/
-[11]:https://github.com/Official-Husko/NN-Downloader/issues
-[12]:https://booru.plus/
-[13]:https://github.com/Official-Husko/NN-Downloader#currently-supported=
-[14]:https://github.com/Official-Husko/NN-Downloader/releases/latest/download/NN-Downloader.exe
-[15]:https://nhentai.net/
-[16]:https://luscious.net/
-[17]:https://www.pixiv.net/
-[18]:https://hentairead.com/
-[19]:https://rule34.art/
-[20]:https://2.multporn.net/
-[21]:https://codeload.github.com/Official-Husko/NN-Downloader/zip/refs/heads/master
-[22]:https://e6ai.net/
-
-Further sites can be added. Just open a [support ticket][11] with the URL to the site.
-
-
-
-
-#### Disclaimer
-***I am not in any way affiliated or working with these Sites. This is an unofficial project.***
-*I would suggest you use a customized Terminal.
-
-
-[//]: # (Ingore These Lines Below)
-[//]: # (Including mirror [rule34.art][19] & [2.multporn.net][20])
+# NN-Downloader
+
+Welcome to the successor of the [multporn image downloader v1][2] & [v2][1] and most downloaders out there for "NSFW" material. The NN-Downloader, or Naughty-Naughty-Downloader (yes, very creative, I know), supports multiple sites via their official APIs (where available), supports proxies, and is portable.
+
+### Preview
+![preview](https://github.com/Official-Husko/NN-Downloader/blob/dev/preview/preview.gif)
+
+> [!NOTE]
+> This project's Windows exe is built using [GitHub Actions][23] for full transparency in the build process.
+
+[Windows Download][14] | [Linux Download][21] (Thanks to [incognibro538](https://github.com/incognibro538))
+
+
+
+#### Features:
+- Portable
+- Proxy Support
+- AI Training Compatible
+- Avoid Duplicates
+- Fast & Efficient Downloads
+
+
+
+#### Currently Supported:
+- [Rule34][3] (API)
+- [E621][4] (API)
+- [E6AI][22] (API)
+- [E926][5] (API)
+- [Furbooru][6] (API)
+- [Multporn][7]
+- [Yiffer][8]
+- [Luscious][16]
+
+
+
+[1]: https://github.com/Official-Husko/multporn-image-downloader-v2
+[2]: https://github.com/Official-Husko/multporn-image-downloader
+[3]: https://rule34.xxx
+[4]: https://e621.net/
+[5]: https://e926.net/
+[6]: https://furbooru.org/
+[7]: https://multporn.net/
+[8]: https://yiffer.xyz/
+[9]: https://theyiffgallery.com/
+[10]: https://furry.booru.org/
+[11]: https://github.com/Official-Husko/NN-Downloader/issues
+[12]: https://booru.plus/
+[13]: https://github.com/Official-Husko/NN-Downloader#currently-supported=
+[14]: https://github.com/Official-Husko/NN-Downloader/releases/latest/download/NN-Downloader.exe
+[15]: https://nhentai.net/
+[16]: https://luscious.net/
+[17]: https://www.pixiv.net/
+[18]: https://hentairead.com/
+[19]: https://rule34.art/
+[20]: https://2.multporn.net/
+[21]: https://codeload.github.com/Official-Husko/NN-Downloader/zip/refs/heads/master
+[22]: https://e6ai.net/
+[23]: https://github.com/Official-Husko/NN-Downloader/actions
+
+Further sites can be added. Just open a [support ticket][11] with the URL to the site.
+
+
+
+
+#### Disclaimer
+***I am not in any way affiliated or working with these Sites. This is an unofficial project.***
+*I would suggest you use a customized terminal.*
+
+
+[//]: # (Ignore These Lines Below)
+[//]: # (Including mirror [rule34.art][19] & [2.multporn.net][20])
diff --git a/enable_env.bat b/enable_env.bat
deleted file mode 100644
index 898e15a..0000000
--- a/enable_env.bat
+++ /dev/null
@@ -1 +0,0 @@
-conda activate ".\.env"
\ No newline at end of file
diff --git a/main.py b/main.py
index a10653c..65d5f64 100644
--- a/main.py
+++ b/main.py
@@ -1,221 +1,242 @@
-from modules import *
-import json
-import os
-from termcolor import colored
-from time import sleep
-import sys
-import inquirer
-
-version = "1.6.3"
-
-if os.name == 'nt':
- from ctypes import windll
- windll.kernel32.SetConsoleTitleW(f"NN-Downloader | v{version}")
-
-proxy_list = []
-header = {"User-Agent":f"nn-downloader/{version} (by Official Husko on GitHub)"}
-needed_folders = ["db", "media"]
-database_list = ["e621", "e6ai", "e926", "furbooru", "rule34"]
-unsafe_chars = ["/", "\\", ":", "*", "?", "\"", "<", ">", "|", "\0", "$", "#", "@", "&", "%", "!", "`", "^", "(", ")", "{", "}", "[", "]", "=", "+", "~", ",", ";"]
-
-if sys.gettrace() is not None:
- DEBUG = True
-else:
- DEBUG = False
-
-if os.path.exists("outdated"):
- version_for_logo = colored(f"v{version}", "cyan", attrs=["blink"])
-else:
- version_for_logo = colored(f"v{version}", "cyan")
-
-logo = f"""{colored(f'''
-d8b db d8b db d8888b. .d88b. db d8b db d8b db db .d88b. .d8b. d8888b. d88888b d8888b.
-888o 88 888o 88 88 `8D .8P Y8. 88 I8I 88 888o 88 88 .8P Y8. d8' `8b 88 `8D 88' 88 `8D
-88V8o 88 88V8o 88 88 88 88 88 88 I8I 88 88V8o 88 88 88 88 88ooo88 88 88 88ooooo 88oobY'
-88 V8o88 88 V8o88 C8888D 88 88 88 88 Y8 I8I 88 88 V8o88 88 88 88 88~~~88 88 88 88~~~~~ 88`8b
-88 V888 88 V888 88 .8D `8b d8' `8b d8'8b d8' 88 V888 88booo. `8b d8' 88 88 88 .8D 88. 88 `88.
-VP V8P VP V8P Y8888D' `Y88P' `8b8' `8d8' VP V8P Y88888P `Y88P' YP YP Y8888D' Y88888P 88 YD
- {version_for_logo} | by {colored("Official-Husko", "yellow")}''', "red")}
-"""
-
-class Main():
- def main_startup():
- def clear_screen():
- if os.name == 'nt':
- os.system("cls")
- else:
- os.system("clear")
- print(colored("Checking for read and write permissions.", "green"))
-
- # Check if the process has read and write permissions
- if os.access(os.getcwd(), os.R_OK | os.W_OK):
- pass
- else:
- print(colored("The program is missing read & write permissions! Change the directory or try run as administrator.", "red"))
- sleep(300)
- sys.exit(0)
-
- print(logo)
- print("")
-
- # Check if needed folders exists else create them
- for folder in needed_folders:
- if not os.path.exists(folder):
- os.mkdir(folder)
-
-
- if os.path.exists("config.json"):
- config = Config_Manager.reader()
- oneTimeDownload = config["oneTimeDownload"]
- use_proxies = config["proxies"]
- checkForUpdates = config["checkForUpdates"]
- ai_training = config["ai_training"]
- else:
- config = Config_Manager.creator()
- print(colored("New Config file generated. Please configure it for your use case and add API keys for needed services.", "green"))
- sleep(7)
- sys.exit(0)
-
- if checkForUpdates == True:
- clear_screen()
- print(logo)
- print("")
- print(colored("Checking for Updates...", "yellow"), end='\r')
- AutoUpdate.Checker()
- clear_screen()
- print(logo)
- print("")
-
- if use_proxies == True:
- print(colored("Fetching Fresh Proxies...", "yellow"), end='\r')
- ProxyScraper.Scraper(proxy_list=proxy_list)
- print(colored(f"Fetched {len(proxy_list)} Proxies. ", "green"))
- print("")
-
- if oneTimeDownload == True:
- for database in database_list:
- with open(f"db/{database}.db", "a") as db_creator:
- db_creator.close()
-
- print(colored("What site do you want to download from?", "green"))
- questions = [
- inquirer.List('selection',
- choices=['E621', 'E6AI', 'E926', 'Furbooru', 'Luscious', 'Multporn', 'Rule34', 'Yiffer']),
- ]
- answers = inquirer.prompt(questions)
- print("")
-
- site = answers.get("selection").lower()
-
- if site in ["e621", "e6ai", "e926", "furbooru", "rule34"]:
-
- print(colored("Please enter the tags you want to use.", "green"))
- user_tags = input(">> ").lower()
- while user_tags == "":
- print(colored("Please enter the tags you want.", "red"))
- sleep(3)
- user_tags = input(">> ").lower()
- print("")
-
- print(colored("How many pages would you like to get?", "green"), colored(" (leave empty for max)", "yellow"))
- max_sites = input(">> ").lower()
- print("")
-
- if site in ["e621", "e6ai", "e926"]:
- api_user = config.get("user_credentials",{}).get(site, {}).get("apiUser", "")
- api_key = config.get("user_credentials", {}).get(site, {}).get("apiKey", "")
- if oneTimeDownload == True:
- with open(f"db/{site}.db", "r") as db_reader:
- database = db_reader.read().splitlines()
- else:
- database = False
- if api_key == "" or api_user == "":
- print(colored("Please add your API Key into the config.json", "red"))
- sleep(10)
- sys.exit(0)
- else:
- output = E6System.fetcher(user_tags=user_tags, user_blacklist=config["blacklisted_tags"], proxy_list=proxy_list, max_sites=max_sites, user_proxies=config["proxies"], api_user=api_user, api_key=api_key, header=header, db=database, site=site, ai_training=ai_training)
-
- elif site == "rule34":
- if oneTimeDownload == True:
- with open("db/rule34.db", "r") as db_reader:
- database = db_reader.read().splitlines()
- else:
- database = False
- output = RULE34.fetcher(user_tags=user_tags, user_blacklist=config["blacklisted_tags"], proxy_list=proxy_list, max_sites=max_sites, user_proxies=config["proxies"], header=header, db=database)
-
- elif site == "furbooru":
- api_key = config.get("user_credentials", {}).get(site, {}).get("apiKey", "")
- if oneTimeDownload == True:
- with open("db/furbooru.db", "r") as db_reader:
- database = db_reader.read().splitlines()
- else:
- database = False
- if api_key == "":
- print(colored("Please add your API Key into the config.json", "red"))
- sleep(5)
- else:
- output = FURBOORU.fetcher(user_tags=user_tags, user_blacklist=config["blacklisted_tags"], proxy_list=proxy_list, max_sites=max_sites, user_proxies=config["proxies"], api_key=api_key, header=header, db=database)
-
- elif site == "multporn":
- print(colored("Please enter the link. (e.g. https://multporn.net/comics/double_trouble_18)", "green"))
- URL = input(">> ")
- while URL == "":
- print(colored("Please enter a valid link.", "red"))
- sleep(1.5)
- URL = input(">> ")
- output = Multporn.Fetcher(proxy_list=proxy_list, user_proxies=config["proxies"], header=header, URL=URL)
-
- elif site == "yiffer":
- print(colored("Please enter the link. (e.g. https://yiffer.xyz/Howl & Jasper)", "green"))
- URL = input(">> ")
- while URL == "":
- print(colored("Please enter a valid link.", "red"))
- sleep(1.5)
- URL = input(">> ")
- output = Yiffer.Fetcher(proxy_list=proxy_list, user_proxies=config["proxies"], header=header, URL=URL)
-
- elif site == "luscious":
- print(colored("Please enter the link. (e.g. https://www.luscious.net/albums/bifurcation-ongoing_437722)", "green"))
- URL = input(">> ")
- while URL == "":
- print(colored("Please enter a valid link.", "red"))
- sleep(1.5)
- URL = input(">> ")
- output = Luscious.Fetcher(proxy_list=proxy_list, user_proxies=config["proxies"], header=header, URL=URL)
-
- else:
- print(colored("Site not supported. Open a ticket to request support for that site!", "red"))
- raise Exception(f"This shouldn't be possible! User tried to download from {site}.")
- Main.main_startup()
-
- status = output.get("status", "why no status man?")
- uinput = output.get("uinput", "URL overdosed :(")
- exception_str = output.get("exception", "Fuck me there was no exception.")
- extra = output.get("extra", "")
-
- if status == "ok":
- pass
-
- elif status == "error":
- print(f"{error} An error occured while downloading from {colored(site, 'yellow')}! Please report this. Exception: {colored(exception_str, 'red')}")
- error_str = f"An error occured while downloading from {site}! Please report this. Exception: {exception_str}"
- Logger.log_event(error_str, extra, uinput)
- sleep(7)
-
- else:
- print(f"{major_error} An unknown error occured while downloading from {colored(site, 'yellow')}! Please report this. Exception: {colored(exception_str, 'red')}")
- error_str = f"An unknown error occured while downloading from {site}! Please report this. Exception: {exception_str}"
- Logger.log_event(error_str, extra, uinput)
- sleep(7)
-
- # Jump back to start
- Main.main_startup()
-
-if __name__ == '__main__':
- try:
- Main.main_startup()
- except KeyboardInterrupt:
- print("User Cancelled")
- sleep(3)
- sys.exit(0)
+from modules import *
+import json
+import os
+from termcolor import colored
+from time import sleep
+import sys
+import inquirer
+
+version = "1.6.4"
+
+if os.name == 'nt':
+ from ctypes import windll
+ windll.kernel32.SetConsoleTitleW(f"NN-Downloader | v{version}")
+
+proxy_list = []
+header = {"User-Agent":f"nn-downloader/{version} (by Official Husko on GitHub)"}
+needed_folders = ["db", "media"]
+database_list = ["e621", "e6ai", "e926", "furbooru", "rule34"]
+unsafe_chars = ["/", "\\", ":", "*", "?", "\"", "<", ">", "|", "\0", "$", "#", "@", "&", "%", "!", "`", "^", "(", ")", "{", "}", "[", "]", "=", "+", "~", ",", ";", "~"]
+
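+# sys.gettrace() returns a trace function only while a debugger is attached
+# (e.g. VS Code's), so this flags debugger-driven runs as DEBUG.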
+if sys.gettrace() is not None:
+ DEBUG = True
+else:
+ DEBUG = False
+
+if os.path.exists("outdated"):
+ version_for_logo = colored(f"v{version}", "cyan", attrs=["blink"])
+else:
+ version_for_logo = colored(f"v{version}", "cyan")
+
+logo = f"""{colored(f'''
+d8b db d8b db d8888b. .d88b. db d8b db d8b db db .d88b. .d8b. d8888b. d88888b d8888b.
+888o 88 888o 88 88 `8D .8P Y8. 88 I8I 88 888o 88 88 .8P Y8. d8' `8b 88 `8D 88' 88 `8D
+88V8o 88 88V8o 88 88 88 88 88 88 I8I 88 88V8o 88 88 88 88 88ooo88 88 88 88ooooo 88oobY'
+88 V8o88 88 V8o88 C8888D 88 88 88 88 Y8 I8I 88 88 V8o88 88 88 88 88~~~88 88 88 88~~~~~ 88`8b
+88 V888 88 V888 88 .8D `8b d8' `8b d8'8b d8' 88 V888 88booo. `8b d8' 88 88 88 .8D 88. 88 `88.
+VP V8P VP V8P Y8888D' `Y88P' `8b8' `8d8' VP V8P Y88888P `Y88P' YP YP Y8888D' Y88888P 88 YD
+ {version_for_logo} | by {colored("Official-Husko", "yellow")}''', "red")}
+"""
+
+class Main():
+ def main_startup():
+ def clear_screen():
+ if os.name == 'nt':
+ os.system("cls")
+ else:
+ os.system("clear")
+ print(colored("Checking for read and write permissions.", "green"))
+
+ # Check if the process has read and write permissions
+ if os.access(os.getcwd(), os.R_OK | os.W_OK):
+ pass
+ else:
+ print(colored("The program is missing read & write permissions! Change the directory or try run as administrator.", "red"))
+ sleep(300)
+ sys.exit(0)
+
+ print(logo)
+ print("")
+
+ # Check if needed folders exists else create them
+ for folder in needed_folders:
+ if not os.path.exists(folder):
+ os.mkdir(folder)
+
+
+ if os.path.exists("config.json"):
+ config = Config_Manager.reader()
+ oneTimeDownload = config["oneTimeDownload"]
+ use_proxies = config["proxies"]
+ checkForUpdates = config["checkForUpdates"]
+ ai_training = config["ai_training"]
+ else:
+ config = Config_Manager.creator()
+ print(colored("New Config file generated. Please configure it for your use case and add API keys for needed services.", "green"))
+ sleep(7)
+ sys.exit(0)
+
+ if checkForUpdates == True:
+ clear_screen()
+ print(logo)
+ print("")
+ print(colored("Checking for Updates...", "yellow"), end='\r')
+ AutoUpdate.Checker()
+ clear_screen()
+ print(logo)
+ print("")
+
+ if use_proxies == True:
+ print(colored("Fetching Fresh Proxies...", "yellow"), end='\r')
+ ProxyScraper.Scraper(proxy_list=proxy_list)
+ print(colored(f"Fetched {len(proxy_list)} Proxies. ", "green"))
+ print("")
+
+ if oneTimeDownload == True:
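+            # Append mode creates each per-site .db file if missing, without truncating existing download history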
+ for database in database_list:
+ with open(f"db/{database}.db", "a") as db_creator:
+ db_creator.close()
+
+ print(colored("What site do you want to download from?", "green"))
+ questions = [
+ inquirer.List('selection',
+ choices=['E621', 'E6AI', 'E926', 'Furbooru', 'Luscious', 'Multporn', 'Rule34', 'Yiffer']),
+ ]
+ answers = inquirer.prompt(questions)
+ print("")
+
+ site = answers.get("selection").lower()
+
+ if site in ["e621", "e6ai", "e926", "furbooru", "rule34"]:
+
+ print(colored("Please enter the tags you want to use.", "green"))
+ user_tags = input(">> ").lower()
+
+ # Check to make sure there are not more than 40 tags
+ user_tags_split = user_tags.split()
+ tags_count = len(user_tags_split)
+
+ if site in ["e621", "e6ai", "e926"]:
+ while user_tags == "" or tags_count > 40:
+ if user_tags == "":
+ print(colored("Please enter the tags you want.", "red"))
+ else:
+ print(colored(f"Sorry, {site.upper()} does not allow more than 40 tags.", "red"))
+ print(colored(f"You entered {tags_count} tags.", "red"))
+
+ sleep(3)
+ user_tags = input(">> ").lower()
+
+ user_tags_split = user_tags.split()
+ tags_count = len(user_tags_split)
+ else:
+ while user_tags == "":
+ print(colored("Please enter the tags you want.", "red"))
+ sleep(3)
+ user_tags = input(">> ").lower()
+
+
+ print("")
+
+ print(colored("How many pages would you like to get?", "green"), colored(" (leave empty for max)", "yellow"))
+ max_sites = input(">> ").lower()
+ print("")
+
+ if site in ["e621", "e6ai", "e926"]:
+ api_user = config.get("user_credentials",{}).get(site, {}).get("apiUser", "")
+ api_key = config.get("user_credentials", {}).get(site, {}).get("apiKey", "")
+ if oneTimeDownload == True:
+ with open(f"db/{site}.db", "r") as db_reader:
+ database = db_reader.read().splitlines()
+ else:
+ database = False
+ if api_key == "" or api_user == "":
+ print(colored("Please add your API Key into the config.json", "red"))
+ sleep(10)
+ sys.exit(0)
+ else:
+ output = E6System.fetcher(user_tags=user_tags, user_blacklist=config["blacklisted_tags"], proxy_list=proxy_list, max_sites=max_sites, user_proxies=config["proxies"], api_user=api_user, api_key=api_key, header=header, db=database, site=site, ai_training=ai_training)
+
+ elif site == "rule34":
+ if oneTimeDownload == True:
+ with open("db/rule34.db", "r") as db_reader:
+ database = db_reader.read().splitlines()
+ else:
+ database = False
+ output = RULE34.fetcher(user_tags=user_tags, user_blacklist=config["blacklisted_tags"], proxy_list=proxy_list, max_sites=max_sites, user_proxies=config["proxies"], header=header, db=database)
+
+ elif site == "furbooru":
+ api_key = config.get("user_credentials", {}).get(site, {}).get("apiKey", "")
+ if oneTimeDownload == True:
+ with open("db/furbooru.db", "r") as db_reader:
+ database = db_reader.read().splitlines()
+ else:
+ database = False
+ if api_key == "":
+ print(colored("Please add your API Key into the config.json", "red"))
+ sleep(5)
+ else:
+ output = FURBOORU.fetcher(user_tags=user_tags, user_blacklist=config["blacklisted_tags"], proxy_list=proxy_list, max_sites=max_sites, user_proxies=config["proxies"], api_key=api_key, header=header, db=database)
+
+ elif site == "multporn":
+ print(colored("Please enter the link. (e.g. https://multporn.net/comics/double_trouble_18)", "green"))
+ URL = input(">> ")
+ while URL == "":
+ print(colored("Please enter a valid link.", "red"))
+ sleep(1.5)
+ URL = input(">> ")
+ output = Multporn.Fetcher(proxy_list=proxy_list, user_proxies=config["proxies"], header=header, URL=URL)
+
+ elif site == "yiffer":
+ print(colored("Please enter the link. (e.g. https://yiffer.xyz/Howl & Jasper)", "green"))
+ URL = input(">> ")
+ while URL == "":
+ print(colored("Please enter a valid link.", "red"))
+ sleep(1.5)
+ URL = input(">> ")
+ output = Yiffer.Fetcher(proxy_list=proxy_list, user_proxies=config["proxies"], header=header, URL=URL)
+
+ elif site == "luscious":
+ print(colored("Please enter the link. (e.g. https://www.luscious.net/albums/bifurcation-ongoing_437722)", "green"))
+ URL = input(">> ")
+ while URL == "":
+ print(colored("Please enter a valid link.", "red"))
+ sleep(1.5)
+ URL = input(">> ")
+ output = Luscious.Fetcher(proxy_list=proxy_list, user_proxies=config["proxies"], header=header, URL=URL)
+
+ else:
+ print(colored("Site not supported. Open a ticket to request support for that site!", "red"))
+ raise Exception(f"This shouldn't be possible! User tried to download from {site}.")
+ Main.main_startup()
+
+ status = output.get("status", "why no status man?")
+ uinput = output.get("uinput", "URL overdosed :(")
+ exception_str = output.get("exception", "Fuck me there was no exception.")
+ extra = output.get("extra", "")
+
+ if status == "ok":
+ pass
+
+ elif status == "error":
+ print(f"{error} An error occured while downloading from {colored(site, 'yellow')}! Please report this. Exception: {colored(exception_str, 'red')}")
+ error_str = f"An error occured while downloading from {site}! Please report this. Exception: {exception_str}"
+ Logger.log_event(error_str, extra, uinput)
+ sleep(7)
+
+ else:
+ print(f"{major_error} An unknown error occured while downloading from {colored(site, 'yellow')}! Please report this. Exception: {colored(exception_str, 'red')}")
+ error_str = f"An unknown error occured while downloading from {site}! Please report this. Exception: {exception_str}"
+ Logger.log_event(error_str, extra, uinput)
+ sleep(7)
+
+ # Jump back to start
+ Main.main_startup()
+
+if __name__ == '__main__':
+ try:
+ Main.main_startup()
+ except KeyboardInterrupt:
+ print("User Cancelled")
+ sleep(3)
+ sys.exit(0)
diff --git a/modules/__init__.py b/modules/__init__.py
index 696f983..a62dc29 100644
--- a/modules/__init__.py
+++ b/modules/__init__.py
@@ -1,14 +1,14 @@
-from .proxyScraper import ProxyScraper
-from .configManager import Config_Manager
-from .auto_update import AutoUpdate
-from .logger import Logger
-from .pretty_print import *
-
-
-# Here are all modules for the sites that are supported
-from .e6systems import E6System
-from .rule34 import RULE34
-from .furbooru import FURBOORU
-from .multporn import Multporn
-from .yiffer import Yiffer
+from .proxyScraper import ProxyScraper
+from .configManager import Config_Manager
+from .auto_update import AutoUpdate
+from .logger import Logger
+from .pretty_print import *
+
+
+# Here are all modules for the sites that are supported
+from .e6systems import E6System
+from .rule34 import RULE34
+from .furbooru import FURBOORU
+from .multporn import Multporn
+from .yiffer import Yiffer
from .luscious import Luscious
\ No newline at end of file
diff --git a/modules/auto_update.py b/modules/auto_update.py
index 92a72d0..bd32835 100644
--- a/modules/auto_update.py
+++ b/modules/auto_update.py
@@ -1,87 +1,87 @@
-import requests
-import random
-import base64
-from termcolor import colored
-import inquirer
-import webbrowser
-import os
-from time import sleep
-from alive_progress import alive_bar
-
-from main import version
-from .logger import Logger
-from .pretty_print import error, ok
-
-class AutoUpdate:
-
- def Checker():
- try:
- url = "https://api.github.com/repos/Official-Husko/NN-Downloader/releases/latest?from=about"
-
- headers = {
- "User-Agent":f"nn-downloader/{version} (by Official Husko on GitHub)",
- "Accept": "application/vnd.github+json",
- "X-GitHub-Api-Version": "2022-11-28"
- }
-
- req = requests.get(url, headers=headers).json()
- repo_version = req.get("tag_name")
- download_link = req["assets"][0]["browser_download_url"]
-
- if str(version) < repo_version:
- print(colored("UPDATE AVAILABLE! ", "red", attrs=["blink"]))
-
- body = req.get("body")
- name = req.get("name")
- date = req.get("published_at").replace("T", " ").replace("Z", "")
-
- print("")
- print(f"Latest release is {colored(name, 'light_blue')} released on {colored(date, 'yellow')}")
- print("")
- print(body)
- print("")
- amount_question = [
- inquirer.List('selection',
- message=colored("Do you want to download the update?", "green"),
- choices=["Yes", "No"],
- ),
- ]
- amount_answers = inquirer.prompt(amount_question)
- print("")
- decision = amount_answers.get("selection")
- if decision == "Yes":
- r = requests.get(download_link, headers={"User-Agent":f"nn-downloader/{version} (by Official Husko on GitHub)"}, timeout=5, stream=True)
- with alive_bar(int(int(r.headers.get('content-length')) / 1024 + 1)) as bar:
- bar.text = f'-> Downloading Update {repo_version}, please wait...'
- file = open(f"nn-downloader-{repo_version}.exe", 'wb')
- for chunk in r.iter_content(chunk_size=1024):
- if chunk:
- file.write(chunk)
- file.flush()
- bar()
- print(f"{ok} Update successfully downloaded! The program will now close and delete the old exe.")
- if os.path.exists("delete-exe.bat"):
- os.remove("delete-exe.bat")
- with open("delete-exe.bat", "a") as bat_creator:
- bat_content = f'TASKKILL -F /IM NN-Downloader.exe\ntimeout 3\nDEL .\\NN-Downloader.exe\nren .\\nn-downloader-{repo_version}.exe NN-Downloader.exe\nDEL .\\delete-exe.bat'
- bat_creator.write(bat_content)
- bat_creator.close()
- os.startfile(r".\\delete-exe.bat")
- sleep(5)
- exit(0)
- elif decision == "No":
- if not os.path.exists("outdated"):
- with open("outdated", "a") as mark_outdated:
- mark_outdated.close()
- elif str(version) >= repo_version:
- try:
- os.remove("outdated")
- except Exception:
- pass
-
- except Exception as e:
- # Construct and print the error
- error_str = f"An error occured while checking for updates! Please report this. Exception: {e}"
- print(f"{error} {error_str}")
- Logger.log_event(error_str, req)
+import requests
+from termcolor import colored
+import inquirer
+import os
+from time import sleep
+from alive_progress import alive_bar
+
+from main import version
+from .logger import Logger
+from .pretty_print import error, ok
+
+class AutoUpdate:
+
+ def Checker():
+        req = None  # keep req defined so the except handler below can always log it
+        try:
+ url = "https://api.github.com/repos/Official-Husko/NN-Downloader/releases/latest?from=about"
+
+ headers = {
+ "User-Agent":f"nn-downloader/{version} (by Official Husko on GitHub)",
+ "Accept": "application/vnd.github+json",
+ "X-GitHub-Api-Version": "2022-11-28"
+ }
+
+ req = requests.get(url, headers=headers).json()
+ repo_version = req.get("tag_name")
+ download_link = req["assets"][0]["browser_download_url"]
+
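+            # NOTE: versions are compared as plain strings below, which assumes release tags keep a consistent format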
+ if str(version) < repo_version:
+ print(colored("UPDATE AVAILABLE! ", "red", attrs=["blink"]))
+
+ body = req.get("body")
+ name = req.get("name")
+ date = req.get("published_at").replace("T", " ").replace("Z", "")
+
+ print("")
+ print(f"Latest release is {colored(name, 'light_blue')} released on {colored(date, 'yellow')}")
+ print("")
+ print(body)
+ print("")
+ amount_question = [
+ inquirer.List('selection',
+ message=colored("Do you want to download the update?", "green"),
+ choices=["Yes", "No"],
+ ),
+ ]
+ amount_answers = inquirer.prompt(amount_question)
+ print("")
+ decision = amount_answers.get("selection")
+ if decision == "Yes":
+ r = requests.get(download_link, headers={"User-Agent":f"nn-downloader/{version} (by Official Husko on GitHub)"}, timeout=5, stream=True)
+ with alive_bar(int(int(r.headers.get('content-length')) / 1024 + 1)) as bar:
+ bar.text = f'-> Downloading Update {repo_version}, please wait...'
+ file = open(f"nn-downloader-{repo_version}.exe", 'wb')
+ for chunk in r.iter_content(chunk_size=1024):
+ if chunk:
+ file.write(chunk)
+ file.flush()
+ bar()
+ print(f"{ok} Update successfully downloaded! The program will now close and delete the old exe.")
+ if os.path.exists("delete-exe.bat"):
+ os.remove("delete-exe.bat")
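+                    # Write a helper batch script that kills the running exe, swaps in the update, then deletes itself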
+ with open("delete-exe.bat", "a") as bat_creator:
+ bat_content = f'TASKKILL -F /IM NN-Downloader.exe\ntimeout 3\nDEL .\\NN-Downloader.exe\nren .\\nn-downloader-{repo_version}.exe NN-Downloader.exe\nDEL .\\delete-exe.bat'
+ bat_creator.write(bat_content)
+ os.startfile(r".\\delete-exe.bat")
+ sleep(5)
+ exit(0)
+ elif decision == "No":
+ if not os.path.exists("outdated"):
+ with open("outdated", "a") as mark_outdated:
+ mark_outdated.close()
+ elif str(version) >= repo_version:
+ try:
+ os.remove("outdated")
+ except Exception:
+ pass
+
+ except Exception as e:
+ # Construct and print the error
+ error_str = f"An error occured while checking for updates! Please report this. Exception: {e}"
+ print(f"{error} {error_str}")
+ Logger.log_event(error_str, req)
sleep(7)
\ No newline at end of file
diff --git a/modules/configManager.py b/modules/configManager.py
index 789d32a..92a3b1d 100644
--- a/modules/configManager.py
+++ b/modules/configManager.py
@@ -1,77 +1,77 @@
-import json
-from time import sleep
-from termcolor import colored
-import os
-
-def_config_version = 1.6
-
-class Config_Manager():
-
- def creator():
- default_config = {
- "version": def_config_version,
- "proxies": True,
- "checkForUpdates": True,
- "oneTimeDownload": True,
- "advancedMode": False,
- "ai_training": False,
- "user_credentials": {
- "e621": {
- "apiUser": "",
- "apiKey": ""
- },
- "e6ai": {
- "apiUser": "",
- "apiKey": ""
- },
- "e926": {
- "apiUser": "",
- "apiKey": ""
- },
- "furbooru": {
- "apiKey": ""
- }
- },
- "blacklisted_tags": [
- "example1",
- "example2"
- ],
- "blacklisted_formats": [
- "example1",
- "example2"
- ]
- }
- with open("config.json", "w") as cf:
- json.dump(default_config, cf, indent=6)
- return 1
- # 1 stands for successful
-
- def reader():
- if os.path.exists("config.json"):
- with open("config.json", "r") as cf:
- try:
- config = json.load(cf)
- config_version = config["version"]
- advanced_mode = config["advancedMode"]
- except:
- config_version = 0
- advanced_mode = False
-
- if advanced_mode == True:
- return config
-
- elif config_version < def_config_version and advanced_mode != True:
- print(colored("You are using an outdated config version! Old one is backed up. Please reconfigure the new one.", "green"))
- if os.path.exists("old_config.json"):
- os.remove("old_config.json")
- os.rename("config.json", "old_config.json")
- else:
- os.rename("config.json", "old_config.json")
- Config_Manager.creator()
- sleep(7)
- exit(0)
- return config
- else:
- return 0
- # 0 means unsuccessful
+import json
+from time import sleep
+from termcolor import colored
+import os
+
+def_config_version = 1.6
+
+class Config_Manager():
+
+ def creator():
+ default_config = {
+ "version": def_config_version,
+ "proxies": True,
+ "checkForUpdates": True,
+ "oneTimeDownload": True,
+ "advancedMode": False,
+ "ai_training": False,
+ "user_credentials": {
+ "e621": {
+ "apiUser": "",
+ "apiKey": ""
+ },
+ "e6ai": {
+ "apiUser": "",
+ "apiKey": ""
+ },
+ "e926": {
+ "apiUser": "",
+ "apiKey": ""
+ },
+ "furbooru": {
+ "apiKey": ""
+ }
+ },
+ "blacklisted_tags": [
+ "example1",
+ "example2"
+ ],
+ "blacklisted_formats": [
+ "example1",
+ "example2"
+ ]
+ }
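+        # Write the config with indentation so it stays easy to edit by hand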
+ with open("config.json", "w") as cf:
+ json.dump(default_config, cf, indent=6)
+ return 1
+ # 1 stands for successful
+
+ def reader():
+ if os.path.exists("config.json"):
+ with open("config.json", "r") as cf:
+ try:
+ config = json.load(cf)
+ config_version = config["version"]
+ advanced_mode = config["advancedMode"]
+                except (KeyError, json.JSONDecodeError):
+ config_version = 0
+ advanced_mode = False
+
+ if advanced_mode == True:
+ return config
+
+ elif config_version < def_config_version and advanced_mode != True:
+ print(colored("You are using an outdated config version! Old one is backed up. Please reconfigure the new one.", "green"))
+ if os.path.exists("old_config.json"):
+ os.remove("old_config.json")
+ os.rename("config.json", "old_config.json")
+ else:
+ os.rename("config.json", "old_config.json")
+ Config_Manager.creator()
+ sleep(7)
+ exit(0)
+            return config
+ else:
+ return 0
+ # 0 means unsuccessful
\ No newline at end of file
diff --git a/modules/create_directory.py b/modules/create_directory.py
new file mode 100644
index 0000000..7e9b8c5
--- /dev/null
+++ b/modules/create_directory.py
@@ -0,0 +1,121 @@
+# Import Standard Libraries
+import os
+import re
+
+# Import Third Party Libraries
+
+# Import Local Libraries
+
+
+class DirectoryManager:
+ """
+ A class for managing directories.
+
+ This class provides methods for creating directories with sanitized, truncated, and space-replaced names.
+ It also handles the sanitization, truncation, and space replacement of folder names.
+
+ Attributes:
+ unsafe_chars (str): A regular expression pattern that matches characters not allowed in folder names.
+        max_folder_name_length (int): A conservative maximum length for folder names, chosen to avoid long-path issues, especially on Windows.
+
+ Methods:
+ __init__(self) -> None: Initializes a new instance of the `DirectoryManager` class.
+ _sanitize_folder_name(self, folder_name: str) -> str: Sanitizes the folder name by removing any unsafe characters.
+ _truncate_folder_name(self, folder_name: str) -> str: Truncates the given folder name if it exceeds the maximum allowed length.
+ _replace_spaces_with_underscores(self, folder_name: str) -> str: Replaces spaces with underscores in the given folder name.
+ create_folder(self, folder_name: str) -> str: Creates a folder with the given folder name.
+ """
+
+ def __init__(self) -> None:
+ """
+ Initializes a new instance of the `DirectoryManager` class.
+
+ This method sets the `unsafe_chars` attribute to a regular expression pattern that matches characters that are not allowed in folder names.
+        It also sets the `max_folder_name_length` attribute to 90, a conservative cap chosen to avoid long-path issues, especially on Windows.
+
+ Parameters:
+ None
+
+ Returns:
+ None
+ """
+
+ self.unsafe_chars = r'[:*?"<>|$#@&%!`^(){}[\]=+~,;~\0]'
+ # I am keeping this at 90 to avoid general issues with long folder names especially on Windows
+ self.max_folder_name_length = 90
+
+ def _sanitize_folder_name(self, folder_name: str) -> str:
+ """
+ Sanitizes the folder name by removing unsafe characters based on the `unsafe_chars` attribute.
+
+ Parameters:
+ folder_name (str): The input folder name to be sanitized.
+
+ Returns:
+ str: The sanitized folder name.
+ """
+
+ sanitized_folder_name = re.sub(self.unsafe_chars, '', folder_name)
+
+ return sanitized_folder_name
+
+ def _truncate_folder_name(self, folder_name: str) -> str:
+ """
+ Truncates the given folder name if it exceeds the maximum allowed length.
+
+ Parameters:
+ folder_name (str): The input folder name to be truncated.
+
+ Returns:
+ str: The truncated folder name if it exceeds the maximum allowed length,
+ otherwise the original folder name.
+ """
+
+ if len(folder_name) > self.max_folder_name_length:
+ return folder_name[:self.max_folder_name_length]
+
+ return folder_name
+
+ def _replace_spaces_with_underscores(self, folder_name: str) -> str:
+ """
+ Replaces spaces with underscores in the given folder name.
+
+ Parameters:
+ folder_name (str): The input folder name with spaces to be replaced.
+
+ Returns:
+ str: The folder name with spaces replaced by underscores.
+ """
+
+ return folder_name.replace(" ", "_")
+
+ def create_folder(self, folder_name: str) -> str:
+ """
+ Creates a folder with the given folder name.
+
+ Parameters:
+ folder_name (str): The name of the folder to be created.
+
+ Returns:
+ str: The sanitized, truncated, and space-replaced folder name.
+
+ This function takes a folder name as input and performs the following steps:
+ 1. Sanitizes the folder name by removing any unsafe characters.
+ 2. Truncates the folder name if it exceeds the maximum allowed length.
+ 3. Replaces any spaces in the folder name with underscores.
+ 4. Creates the folder with the sanitized, truncated, and space-replaced name.
+ 5. Returns the sanitized, truncated, and space-replaced folder name.
+
+ Note:
+ - The function uses the private methods `_sanitize_folder_name`, `_truncate_folder_name`, and `_replace_spaces_with_underscores` to perform the sanitization, truncation, and space replacement respectively.
+ - The `os.makedirs` function is used to create the folder with the sanitized, truncated, and space-replaced name.
+ - The `exist_ok=True` parameter ensures that the function does not raise an exception if the folder already exists.
+ """
+
+ sanitized_folder_name = self._sanitize_folder_name(folder_name=folder_name)
+ truncated_folder_name = self._truncate_folder_name(folder_name=sanitized_folder_name)
+ replaced_spaces_folder_name = self._replace_spaces_with_underscores(folder_name=truncated_folder_name)
+
+ os.makedirs(replaced_spaces_folder_name, exist_ok=True)
+
+ return replaced_spaces_folder_name
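+
+
+# A minimal usage sketch (illustrative; the folder name below is made up):
+#
+#   manager = DirectoryManager()
+#   path = manager.create_folder("media/01-01-2024_12-00-00 wolf female sfw?")
+#   print(path)  # -> media/01-01-2024_12-00-00_wolf_female_sfw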
diff --git a/modules/deprecated/e621.py b/modules/deprecated/e621.py
deleted file mode 100644
index f41ddf7..0000000
--- a/modules/deprecated/e621.py
+++ /dev/null
@@ -1,106 +0,0 @@
-from requests.auth import HTTPBasicAuth
-import requests
-import random
-from termcolor import colored
-from alive_progress import alive_bar
-from time import sleep
-from datetime import datetime
-import os
-
-from main import unsafe_chars
-now = datetime.now()
-dt_now = now.strftime("%d-%m-%Y_%H-%M-%S")
-
-class E621():
- def Fetcher(user_tags, user_blacklist, proxy_list, max_sites, user_proxies, apiUser ,apiKey, header, db):
- try:
- approved_list = []
- page = 1
- while True:
- URL = f"https://e621.net/posts.json?tags={user_tags}&limit=320&page={page}"
- if user_proxies == True:
- proxy = random.choice(proxy_list)
- raw_req = requests.get(URL, headers=header, proxies=proxy, auth=HTTPBasicAuth(apiUser, apiKey))
- else:
- raw_req = requests.get(URL, headers=header, auth=HTTPBasicAuth(apiUser, apiKey))
-
- req = raw_req.json()
-
- try:
- if req["message"] == "You cannot go beyond page 750. Please narrow your search terms.":
- print(colored(req["message"] + " (API limit)", "red"))
- sleep(5)
- break
- except:
- pass
-
- if req["posts"] == []:
- print(colored("No images found or all downloaded! Try different tags.", "yellow"))
- sleep(5)
- break
-
- elif page == max_sites:
- print(colored(f"Finished Downloading {max_sites} of {max_sites} pages.", "yellow"))
- sleep(5)
- break
-
- else:
- for item in req["posts"]:
- image_id = item["id"]
- image_address = item["file"]["url"]
- post_tags1 = item["tags"]["general"]
- post_tags2 = item["tags"]["species"]
- post_tags3 = item["tags"]["character"]
- post_tags4 = item["tags"]["copyright"]
- post_tags5 = item["tags"]["artist"]
- post_tags = post_tags1 + post_tags2 + post_tags3 + post_tags4 + post_tags5
- image_format = item["file"]["ext"]
- user_blacklist_lenght = len(user_blacklist)
- passed = 0
-
- for blacklisted_tag in user_blacklist:
- if blacklisted_tag in post_tags:
- break
- else:
- passed += 1
- if passed == user_blacklist_lenght and str(image_id) not in db and image_address != None:
- image_data = {"image_address": image_address, "image_format": image_format, "image_id": image_id}
- approved_list.append(image_data)
- else:
- pass
-
- # Download Each file
- with alive_bar(len(approved_list), calibrate=1, dual_line=True, title='Downloading') as bar:
- for data in approved_list:
- image_address = data["image_address"]
- image_format = data["image_format"]
- image_id = data["image_id"]
- bar.text = f'-> Downloading: {image_id}, please wait...'
- if user_proxies == True:
- proxy = random.choice(proxy_list)
- img_data = requests.get(image_address, proxies=proxy).content
- else:
- sleep(1)
- img_data = requests.get(image_address).content
-
- safe_user_tags = user_tags.replace(" ", "_")
- for char in unsafe_chars:
- safe_user_tags = safe_user_tags.replace(char, "")
-
- if not os.path.exists(f"media/{dt_now}_{safe_user_tags}"):
- os.mkdir(f"media/{dt_now}_{safe_user_tags}")
- with open(f"media/{dt_now}_{safe_user_tags}/{str(image_id)}.{image_format}", 'wb') as handler:
- handler.write(img_data)
- with open("db/e621.db", "a") as db_writer:
- db_writer.write(f"{str(image_id)}\n")
- bar()
-
- print(colored(f"Page {page} Completed", "green"))
- approved_list.clear()
- page += 1
- sleep(5)
-
- return {"status": "ok"}
-
- except Exception as e:
- return {"status": "error", "uinput": user_tags, "exception": str(e), "extra": raw_req.content}
\ No newline at end of file
diff --git a/modules/deprecated/e6ai.py b/modules/deprecated/e6ai.py
deleted file mode 100644
index 2fb382c..0000000
--- a/modules/deprecated/e6ai.py
+++ /dev/null
@@ -1,106 +0,0 @@
-from requests.auth import HTTPBasicAuth
-import requests
-import random
-from termcolor import colored
-from alive_progress import alive_bar
-from time import sleep
-from datetime import datetime
-import os
-
-from main import unsafe_chars
-now = datetime.now()
-dt_now = now.strftime("%d-%m-%Y_%H-%M-%S")
-
-class E6AI():
- def Fetcher(user_tags, user_blacklist, proxy_list, max_sites, user_proxies, apiUser ,apiKey, header, db):
- try:
- approved_list = []
- page = 1
- while True:
- URL = f"https://e6ai.net/posts.json?tags={user_tags}&limit=320&page={page}"
- if user_proxies == True:
- proxy = random.choice(proxy_list)
- raw_req = requests.get(URL, headers=header, proxies=proxy, auth=HTTPBasicAuth(apiUser, apiKey))
- else:
- raw_req = requests.get(URL, headers=header, auth=HTTPBasicAuth(apiUser, apiKey))
-
- req = raw_req.json()
-
- try:
- if req["message"] == "You cannot go beyond page 750. Please narrow your search terms.":
- print(colored(req["message"] + " (API limit)", "red"))
- sleep(5)
- break
- except:
- pass
-
- if req["posts"] == []:
- print(colored("No images found or all downloaded! Try different tags.", "yellow"))
- sleep(5)
- break
-
- elif page == max_sites:
- print(colored(f"Finished Downloading {max_sites} of {max_sites} pages.", "yellow"))
- sleep(5)
- break
-
- else:
- for item in req["posts"]:
- image_id = item["id"]
- image_address = item["file"]["url"]
- post_tags1 = item["tags"]["general"]
- post_tags2 = item["tags"]["species"]
- post_tags3 = item["tags"]["character"]
- post_tags4 = item["tags"]["director"]
- post_tags5 = item["tags"]["meta"]
- post_tags = post_tags1 + post_tags2 + post_tags3 + post_tags4 + post_tags5
- image_format = item["file"]["ext"]
- user_blacklist_lenght = len(user_blacklist)
- passed = 0
-
- for blacklisted_tag in user_blacklist:
- if blacklisted_tag in post_tags:
- break
- else:
- passed += 1
- if passed == user_blacklist_lenght and str(image_id) not in db and image_address != None:
- image_data = {"image_address": image_address, "image_format": image_format, "image_id": image_id}
- approved_list.append(image_data)
- else:
- pass
-
- # Download Each file
- with alive_bar(len(approved_list), calibrate=1, dual_line=True, title='Downloading') as bar:
- for data in approved_list:
- image_address = data["image_address"]
- image_format = data["image_format"]
- image_id = data["image_id"]
- bar.text = f'-> Downloading: {image_id}, please wait...'
- if user_proxies == True:
- proxy = random.choice(proxy_list)
- img_data = requests.get(image_address, proxies=proxy).content
- else:
- sleep(1)
- img_data = requests.get(image_address).content
-
- safe_user_tags = user_tags.replace(" ", "_")
- for char in unsafe_chars:
- safe_user_tags = safe_user_tags.replace(char, "")
-
- if not os.path.exists(f"media/{dt_now}_{safe_user_tags}"):
- os.mkdir(f"media/{dt_now}_{safe_user_tags}")
- with open(f"media/{dt_now}_{safe_user_tags}/{str(image_id)}.{image_format}", 'wb') as handler:
- handler.write(img_data)
- with open("db/e6ai.db", "a") as db_writer:
- db_writer.write(f"{str(image_id)}\n")
- bar()
-
- print(colored(f"Page {page} Completed", "green"))
- approved_list.clear()
- page += 1
- sleep(5)
-
- return {"status": "ok"}
-
- except Exception as e:
- return {"status": "error", "uinput": user_tags, "exception": str(e), "extra": raw_req.content}
\ No newline at end of file
diff --git a/modules/deprecated/e926.py b/modules/deprecated/e926.py
deleted file mode 100644
index 02c9334..0000000
--- a/modules/deprecated/e926.py
+++ /dev/null
@@ -1,106 +0,0 @@
-from requests.auth import HTTPBasicAuth
-import requests
-import random
-from termcolor import colored
-from alive_progress import alive_bar
-from time import sleep
-from datetime import datetime
-import os
-
-from main import unsafe_chars
-now = datetime.now()
-dt_now = now.strftime("%d-%m-%Y_%H-%M-%S")
-
-class E926():
- def Fetcher(user_tags, user_blacklist, proxy_list, max_sites, user_proxies, apiUser ,apiKey, header, db):
- try:
- approved_list = []
- page = 1
- while True:
- URL = f"https://e926.net/posts.json?tags={user_tags}&limit=320&page={page}"
- if user_proxies == True:
- proxy = random.choice(proxy_list)
- raw_req = requests.get(URL, headers=header, proxies=proxy, auth=HTTPBasicAuth(apiUser, apiKey))
- else:
- raw_req = requests.get(URL, headers=header, auth=HTTPBasicAuth(apiUser, apiKey))
-
- req = raw_req.json()
-
- try:
- if req["message"] == "You cannot go beyond page 750. Please narrow your search terms.":
- print(colored(req["message"] + " (API limit)", "red"))
- sleep(5)
- break
- except:
- pass
-
- if req["posts"] == []:
- print(colored("No images found or all downloaded! Try different tags.", "yellow"))
- sleep(5)
- break
-
- elif page == max_sites:
- print(colored(f"Finished Downloading {max_sites} of {max_sites} pages.", "yellow"))
- sleep(5)
- break
-
- else:
- for item in req["posts"]:
- image_id = item["id"]
- image_address = item["file"]["url"]
- post_tags1 = item["tags"]["general"]
- post_tags2 = item["tags"]["species"]
- post_tags3 = item["tags"]["character"]
- post_tags4 = item["tags"]["copyright"]
- post_tags5 = item["tags"]["artist"]
- post_tags = post_tags1 + post_tags2 + post_tags3 + post_tags4 + post_tags5
- image_format = item["file"]["ext"]
- user_blacklist_lenght = len(user_blacklist)
- passed = 0
-
- for blacklisted_tag in user_blacklist:
- if blacklisted_tag in post_tags:
- break
- else:
- passed += 1
- if passed == user_blacklist_lenght and str(image_id) not in db and image_address != None:
- image_data = {"image_address": image_address, "image_format": image_format, "image_id": image_id}
- approved_list.append(image_data)
- else:
- pass
-
- # Download Each file
- with alive_bar(len(approved_list), calibrate=1, dual_line=True, title='Downloading') as bar:
- for data in approved_list:
- image_address = data["image_address"]
- image_format = data["image_format"]
- image_id = data["image_id"]
- bar.text = f'-> Downloading: {image_id}, please wait...'
- if user_proxies == True:
- proxy = random.choice(proxy_list)
- img_data = requests.get(image_address, proxies=proxy).content
- else:
- sleep(1)
- img_data = requests.get(image_address).content
-
- safe_user_tags = user_tags.replace(" ", "_")
- for char in unsafe_chars:
- safe_user_tags = safe_user_tags.replace(char, "")
-
- if not os.path.exists(f"media/{dt_now}_{safe_user_tags}"):
- os.mkdir(f"media/{dt_now}_{safe_user_tags}")
- with open(f"media/{dt_now}_{safe_user_tags}/{str(image_id)}.{image_format}", 'wb') as handler:
- handler.write(img_data)
- with open("db/e621.db", "a") as db_writer:
- db_writer.write(f"{str(image_id)}\n")
- bar()
-
- print(colored(f"Page {page} Completed", "green"))
- approved_list.clear()
- page += 1
- sleep(5)
-
- return {"status": "ok"}
-
- except Exception as e:
- return {"status": "error", "uinput": user_tags, "exception": str(e), "extra": raw_req.content}
\ No newline at end of file
diff --git a/modules/e6systems.py b/modules/e6systems.py
index 8990f3c..d02d1a0 100644
--- a/modules/e6systems.py
+++ b/modules/e6systems.py
@@ -1,102 +1,106 @@
-import os
-import json
-import random
-import requests
-from requests.auth import HTTPBasicAuth
-from termcolor import colored
-from alive_progress import alive_bar
-from time import sleep
-from datetime import datetime
-
-from main import unsafe_chars
-
-class E6System:
- @staticmethod
- def fetcher(user_tags, user_blacklist, proxy_list, max_sites, user_proxies, api_user, api_key, header, db, site, ai_training):
- try:
- approved_list = []
- now = datetime.now()
- dt_now = now.strftime("%d-%m-%Y_%H-%M-%S")
- page = 1
-
- while True:
- URL = f"https://{site}.net/posts.json?tags={user_tags}&limit=320&page={page}"
- proxy = random.choice(proxy_list) if user_proxies else None
- raw_req = requests.get(URL, headers=header, proxies=proxy, auth=HTTPBasicAuth(api_user, api_key))
- req = raw_req.json()
-
- if "message" in req and req["message"] == "You cannot go beyond page 750. Please narrow your search terms.":
- print(colored(req["message"] + " (API limit)", "red"))
- sleep(5)
- break
-
- if not req["posts"]:
- print(colored("No images found or all downloaded! Try different tags.", "yellow"))
- sleep(5)
- break
-
- elif page == max_sites:
- print(colored(f"Finished Downloading {max_sites} of {max_sites} pages.", "yellow"))
- sleep(5)
- break
-
- else:
- for item in req["posts"]:
- image_id = item["id"]
- image_address = item["file"].get("url")
- meta_tags = item["tags"] if ai_training else []
- post_tags = [item["tags"][tag_type] for tag_type in ["general", "species", "character"]]
- post_tags += [item["tags"]["director"], item["tags"]["meta"]] if site == "e6ai" else [item["tags"]["copyright"], item["tags"]["artist"]]
- post_tags = sum(post_tags, [])
- user_blacklist_length = len(user_blacklist)
-
- passed = sum(blacklisted_tag in post_tags for blacklisted_tag in user_blacklist)
-
- if passed == 0 and not db and image_address and not any(tag in user_blacklist for tag in post_tags):
- image_data = {"image_address": image_address, "image_format": item["file"]["ext"], "image_id": image_id, "meta_tags": meta_tags}
- approved_list.append(image_data)
-
- elif db and str(image_id) not in db and image_address and not any(tag in user_blacklist for tag in post_tags):
- image_data = {"image_address": image_address, "image_format": item["file"]["ext"], "image_id": image_id, "meta_tags": meta_tags}
- approved_list.append(image_data)
-
- with alive_bar(len(approved_list), calibrate=1, dual_line=True, title='Downloading') as bar:
- for data in approved_list:
- image_address = data.get("image_address")
- image_format = data.get("image_format")
- image_id = data.get("image_id")
- meta_tags = data.get("meta_tags")
- bar.text = f'-> Downloading: {image_id}, please wait...'
-
- proxy = random.choice(proxy_list) if user_proxies else None
- img_data = requests.get(image_address, proxies=proxy).content if user_proxies else requests.get(image_address).content
-
- safe_user_tags = "".join(char for char in user_tags if char not in unsafe_chars).replace(" ", "_")
- directory = f"media/{dt_now}_{safe_user_tags}"
- meta_directory = f"{directory}/meta"
-
- os.makedirs(directory, exist_ok=True)
-
- if ai_training == True:
- os.makedirs(meta_directory, exist_ok=True)
- with open(f"{meta_directory}/{str(image_id)}.json", 'w') as handler:
- json.dump(meta_tags, handler, indent=6)
-
- with open(f"{directory}/{str(image_id)}.{image_format}", 'wb') as handler:
- handler.write(img_data)
-
- if db != False:
- with open(f"db/{site}.db", "a") as db_writer:
- db_writer.write(f"{str(image_id)}\n")
-
- bar()
-
- print(colored(f"Page {page} Completed", "green"))
- approved_list.clear()
- page += 1
- sleep(5)
-
- return {"status": "ok"}
-
- except Exception as e:
- return {"status": "error", "uinput": user_tags, "exception": str(e), "extra": raw_req.content}
+import os
+import json
+import random
+import requests
+from requests.auth import HTTPBasicAuth
+from termcolor import colored
+from alive_progress import alive_bar
+from time import sleep
+from datetime import datetime
+
+from main import unsafe_chars
+from .create_directory import DirectoryManager
+
+class E6System:
+ @staticmethod
+ def fetcher(user_tags, user_blacklist, proxy_list, max_sites, user_proxies, api_user, api_key, header, db, site, ai_training):
+ try:
+ Directory_Manager_Instance = DirectoryManager()
+
+ approved_list = []
+ now = datetime.now()
+ dt_now = now.strftime("%d-%m-%Y_%H-%M-%S")
+ page = 1
+
+ while True:
+ URL = f"https://{site}.net/posts.json?tags={user_tags}&limit=320&page={page}"
+ proxy = random.choice(proxy_list) if user_proxies else None
+ raw_req = requests.get(URL, headers=header, proxies=proxy, auth=HTTPBasicAuth(api_user, api_key))
+ req = raw_req.json()
+
+ if "message" in req and req["message"] == "You cannot go beyond page 750. Please narrow your search terms.":
+ print(colored(req["message"] + " (API limit)", "red"))
+ sleep(5)
+ break
+
+ if not req["posts"]:
+ print(colored("No images found or all downloaded! Try different tags.", "yellow"))
+ sleep(5)
+ break
+
+ elif page == max_sites:
+ print(colored(f"Finished Downloading {max_sites} of {max_sites} pages.", "yellow"))
+ sleep(5)
+ break
+
+ else:
+ for item in req["posts"]:
+ image_id = item["id"]
+ image_address = item["file"].get("url")
+ meta_tags = item["tags"] if ai_training else []
+ post_tags = [item["tags"][tag_type] for tag_type in ["general", "species", "character"]]
+ post_tags += [item["tags"]["director"], item["tags"]["meta"]] if site == "e6ai" else [item["tags"]["copyright"], item["tags"]["artist"]]
+ post_tags = sum(post_tags, [])
+                        # Skip posts that carry a blacklisted tag, lack a file URL,
+                        # or are already recorded in the download history
+                        is_blacklisted = any(tag in user_blacklist for tag in post_tags)
+                        already_downloaded = db and str(image_id) in db
+
+                        if not is_blacklisted and image_address and not already_downloaded:
+                            image_data = {"image_address": image_address, "image_format": item["file"]["ext"], "image_id": image_id, "meta_tags": meta_tags}
+                            approved_list.append(image_data)
+
+ with alive_bar(len(approved_list), calibrate=1, dual_line=True, title='Downloading') as bar:
+ for data in approved_list:
+ image_address = data.get("image_address")
+ image_format = data.get("image_format")
+ image_id = data.get("image_id")
+ meta_tags = data.get("meta_tags")
+ bar.text = f'-> Downloading: {image_id}, please wait...'
+
+ proxy = random.choice(proxy_list) if user_proxies else None
+ img_data = requests.get(image_address, proxies=proxy).content if user_proxies else requests.get(image_address).content
+
+                        # TODO: Rewrite this; it currently recomputes these static values on every iteration
+ directory = f"media/{dt_now} {user_tags}"
+
+ directory = Directory_Manager_Instance.create_folder(folder_name=directory)
+
+ meta_directory = f"{directory}/meta"
+
+ if ai_training == True:
+ os.makedirs(meta_directory, exist_ok=True)
+ with open(f"{meta_directory}/{str(image_id)}.json", 'w', encoding='utf-8') as handler:
+ json.dump(meta_tags, handler, indent=6)
+
+ with open(f"{directory}/{str(image_id)}.{image_format}", 'wb') as handler:
+ handler.write(img_data)
+
+ if db != False:
+ with open(f"db/{site}.db", "a", encoding="utf-8") as db_writer:
+ db_writer.write(f"{str(image_id)}\n")
+
+ bar()
+
+ print(colored(f"Page {page} Completed", "green"))
+ approved_list.clear()
+ page += 1
+ sleep(5)
+
+ return {"status": "ok"}
+
+ except Exception as e:
+ return {"status": "error", "uinput": user_tags, "exception": str(e), "extra": raw_req.content}
diff --git a/modules/furbooru.py b/modules/furbooru.py
index 0b9dc2b..7d990c5 100644
--- a/modules/furbooru.py
+++ b/modules/furbooru.py
@@ -1,97 +1,97 @@
-import requests # Importing requests library for making HTTP requests
-import random # Importing random library for random selection
-from termcolor import colored # Importing colored function from termcolor for colored output
-from alive_progress import alive_bar # Importing alive_bar from alive_progress for progress bar
-from time import sleep # Importing sleep function from time for delaying execution
-from datetime import datetime # Importing datetime class from datetime module for date and time operations
-import os # Importing os module for operating system related functionalities
-
-from main import unsafe_chars # Importing unsafe_chars from main module
-
-now = datetime.now() # Getting current date and time
-dt_now = now.strftime("%d-%m-%Y_%H-%M-%S") # Formatting current date and time
-
-class FURBOORU():
- @staticmethod
- def fetcher(user_tags, user_blacklist, proxy_list, max_sites, user_proxies, api_key, header, db):
- """
- Fetches images from Furbooru API based on user-defined tags and parameters.
-
- Args:
- user_tags (str): User-defined tags for image search.
- user_blacklist (list): List of tags to blacklist.
- proxy_list (list): List of proxies to use for requests.
- max_sites (int): Maximum number of pages to fetch images from.
- user_proxies (bool): Flag indicating whether to use proxies for requests.
- api_key (str): API key for accessing the Furbooru API.
- header (dict): HTTP header for requests.
- db (bool or set): Database of downloaded images.
-
- Returns:
- dict: Dictionary containing status of the operation.
- """
- try:
- user_tags = user_tags.replace(" ", ", ") # Replace spaces in user_tags with commas
- approved_list = [] # List to store approved images
- page = 1 # Starting page number
-
- while True:
- URL = f"https://furbooru.org/api/v1/json/search/images?q={user_tags}&page={page}&key={api_key}&per_page=50"
- # Constructing URL for API request
- proxy = random.choice(proxy_list) if user_proxies else None # Selecting random proxy if user_proxies is True
- raw_req = requests.get(URL, headers=header, proxies=proxy) # Making HTTP GET request
- req = raw_req.json() # Parsing JSON response
-
- if req["total"] == 0:
- print(colored("No images found or all downloaded! Try different tags.", "yellow")) # Display message if no images found
- sleep(5) # Wait for 5 seconds
- break
- elif page == max_sites:
- print(colored(f"Finished Downloading {max_sites} of {max_sites} pages.", "yellow")) # Display message when maximum pages reached
- sleep(5) # Wait for 5 seconds
- break
- else:
- for item in req["images"]:
- if not item["hidden_from_users"]:
- post_tags = item["tags"]
- if any(tag in user_blacklist for tag in post_tags):
- continue # Skip image if any blacklisted tag is found
-
- image_address = item["representations"]["full"]
- image_format = item["format"]
- image_id = item["id"]
-
- if db is False or str(image_id) not in db:
- image_data = {"image_address": image_address, "image_format": image_format, "image_id": image_id}
- approved_list.append(image_data)
-
- with alive_bar(len(approved_list), calibrate=1, dual_line=True, title='Downloading') as bar:
- for data in approved_list:
- image_address = data["image_address"]
- image_format = data["image_format"]
- image_id = data["image_id"]
- bar.text = f'-> Downloading: {image_id}, please wait...'
-
- proxy = random.choice(proxy_list) if user_proxies else None
- img_data = requests.get(image_address, proxies=proxy).content if user_proxies else requests.get(image_address).content
-
- safe_user_tags = "".join(char for char in user_tags if char not in unsafe_chars).replace(" ", "_")
- directory = f"media/{dt_now}_{safe_user_tags}"
- os.makedirs(directory, exist_ok=True)
-
- with open(f"{directory}/{str(image_id)}.{image_format}", 'wb') as handler:
- handler.write(img_data)
-
- if db != False:
- with open("db/furbooru.db", "a") as db_writer:
- db_writer.write(f"{str(image_id)}\n")
-
- bar()
- print(colored(f"Page {page} Completed", "green")) # Display completion message for current page
- approved_list.clear() # Clear approved_list for next page
- page += 1 # Move to next page
-
- return {"status": "ok"} # Return success status
-
- except Exception as e:
- return {"status": "error", "uinput": user_tags, "exception": str(e), "extra": raw_req.content} # Return error status along with details
+import requests # Importing requests library for making HTTP requests
+import random # Importing random library for random selection
+from termcolor import colored # Importing colored function from termcolor for colored output
+from alive_progress import alive_bar # Importing alive_bar from alive_progress for progress bar
+from time import sleep # Importing sleep function from time for delaying execution
+from datetime import datetime # Importing datetime class from datetime module for date and time operations
+import os # Importing os module for operating system related functionalities
+
+from main import unsafe_chars # Importing unsafe_chars from main module
+
+now = datetime.now() # Getting current date and time
+dt_now = now.strftime("%d-%m-%Y_%H-%M-%S") # Formatting current date and time
+
+class FURBOORU():
+ @staticmethod
+ def fetcher(user_tags, user_blacklist, proxy_list, max_sites, user_proxies, api_key, header, db):
+ """
+ Fetches images from Furbooru API based on user-defined tags and parameters.
+
+ Args:
+ user_tags (str): User-defined tags for image search.
+ user_blacklist (list): List of tags to blacklist.
+ proxy_list (list): List of proxies to use for requests.
+ max_sites (int): Maximum number of pages to fetch images from.
+ user_proxies (bool): Flag indicating whether to use proxies for requests.
+ api_key (str): API key for accessing the Furbooru API.
+ header (dict): HTTP header for requests.
+ db (bool or set): Database of downloaded images.
+
+ Returns:
+ dict: Dictionary containing status of the operation.
+ """
+ try:
+ user_tags = user_tags.replace(" ", ", ") # Replace spaces in user_tags with commas
+ approved_list = [] # List to store approved images
+ page = 1 # Starting page number
+
+ while True:
+ URL = f"https://furbooru.org/api/v1/json/search/images?q={user_tags}&page={page}&key={api_key}&per_page=50"
+ # Constructing URL for API request
+ proxy = random.choice(proxy_list) if user_proxies else None # Selecting random proxy if user_proxies is True
+ raw_req = requests.get(URL, headers=header, proxies=proxy) # Making HTTP GET request
+ req = raw_req.json() # Parsing JSON response
+
+ if req["total"] == 0:
+ print(colored("No images found or all downloaded! Try different tags.", "yellow")) # Display message if no images found
+ sleep(5) # Wait for 5 seconds
+ break
+ elif page == max_sites:
+ print(colored(f"Finished Downloading {max_sites} of {max_sites} pages.", "yellow")) # Display message when maximum pages reached
+ sleep(5) # Wait for 5 seconds
+ break
+ else:
+ for item in req["images"]:
+ if not item["hidden_from_users"]:
+ post_tags = item["tags"]
+ if any(tag in user_blacklist for tag in post_tags):
+ continue # Skip image if any blacklisted tag is found
+
+ image_address = item["representations"]["full"]
+ image_format = item["format"]
+ image_id = item["id"]
+
+ if db is False or str(image_id) not in db:
+ image_data = {"image_address": image_address, "image_format": image_format, "image_id": image_id}
+ approved_list.append(image_data)
+
+ with alive_bar(len(approved_list), calibrate=1, dual_line=True, title='Downloading') as bar:
+ for data in approved_list:
+ image_address = data["image_address"]
+ image_format = data["image_format"]
+ image_id = data["image_id"]
+ bar.text = f'-> Downloading: {image_id}, please wait...'
+
+ proxy = random.choice(proxy_list) if user_proxies else None
+ img_data = requests.get(image_address, proxies=proxy).content if user_proxies else requests.get(image_address).content
+
+ safe_user_tags = "".join(char for char in user_tags if char not in unsafe_chars).replace(" ", "_")
+ directory = f"media/{dt_now}_{safe_user_tags}"
+ os.makedirs(directory, exist_ok=True)
+
+ with open(f"{directory}/{str(image_id)}.{image_format}", 'wb') as handler:
+ handler.write(img_data)
+
+ if db != False:
+ with open("db/furbooru.db", "a") as db_writer:
+ db_writer.write(f"{str(image_id)}\n")
+
+ bar()
+ print(colored(f"Page {page} Completed", "green")) # Display completion message for current page
+ approved_list.clear() # Clear approved_list for next page
+ page += 1 # Move to next page
+
+ return {"status": "ok"} # Return success status
+
+ except Exception as e:
+ return {"status": "error", "uinput": user_tags, "exception": str(e), "extra": raw_req.content} # Return error status along with details
diff --git a/modules/logger.py b/modules/logger.py
index 6812f86..7f52c78 100644
--- a/modules/logger.py
+++ b/modules/logger.py
@@ -1,17 +1,17 @@
-import os
-from datetime import datetime
-from main import version
-
-class Logger:
-
- def startup():
- if os.path.exists("runtime.log"):
- os.remove("runtime.log")
-
- def log_event(message, extra="", uinput=""):
- with open("runtime.log", "a") as log_dumper:
- base_line = f"{datetime.now()} | v{version} | Error: {message}"
- if extra == "":
- log_dumper.writelines(base_line + "\n")
- else:
+import os
+from datetime import datetime
+from main import version
+
+class Logger:
+
+ def startup():
+ if os.path.exists("runtime.log"):
+ os.remove("runtime.log")
+
+ def log_event(message, extra="", uinput=""):
+ with open("runtime.log", "a") as log_dumper:
+ base_line = f"{datetime.now()} | v{version} | Error: {message}"
+ if extra == "":
+ log_dumper.writelines(base_line + "\n")
+ else:
log_dumper.writelines(f"{base_line} | Additional Info: {extra} | Input: {uinput}\n")
\ No newline at end of file
diff --git a/modules/luscious.py b/modules/luscious.py
index 304c10e..a2f5d86 100644
--- a/modules/luscious.py
+++ b/modules/luscious.py
@@ -1,96 +1,96 @@
-import requests
-import random
-from termcolor import colored
-from time import sleep
-from alive_progress import alive_bar
-import os
-import json
-
-from main import unsafe_chars
-from main import version
-
-class Luscious():
- def Fetcher(proxy_list, user_proxies, header, URL):
- try:
- # sort link for category
- parts = URL.split("/")
- if parts[3] == "pictures":
- title = parts[5].partition("_")
- id = parts[5].rpartition("_")
- elif parts[3] in ["album", "albums"]:
- title = parts[4].partition("_")
- id = parts[4].rpartition("_")
- else:
- print("An error occured! Please report this with the link you used.")
- sleep(5)
- return
- id = id[2]
- title = title[0]
-
- page = 1
- while True:
- header = {"User-Agent":f"nn-downloader/{version} (by Official Husko on GitHub)", "Content-Type": "application/json", "Accept": "application/json"}
- data = {"id":"6","operationName":"PictureListInsideAlbum","query":"\n query PictureListInsideAlbum($input: PictureListInput!) {\n picture {\n list(input: $input) {\n info {\n ...FacetCollectionInfo\n }\n items {\n __typename\n id\n title\n description\n created\n like_status\n number_of_comments\n number_of_favorites\n moderation_status\n width\n height\n resolution\n aspect_ratio\n url_to_original\n url_to_video\n is_animated\n position\n permissions\n url\n tags {\n category\n text\n url\n }\n thumbnails {\n width\n height\n size\n url\n }\n }\n }\n }\n}\n \n fragment FacetCollectionInfo on FacetCollectionInfo {\n page\n has_next_page\n has_previous_page\n total_items\n total_pages\n items_per_page\n url_complete\n}\n ","variables":{"input":{"filters":[{"name":"album_id","value":id}],"display":"position","items_per_page":50,"page":page}}}
- data = json.dumps(data)
- API_URL = "https://members.luscious.net/graphql/nobatch/?operationName=PictureListInsideAlbum"
- if user_proxies == True:
- proxy = random.choice(proxy_list)
- raw_req = requests.post(API_URL, headers=header, proxies=proxy, data=data)
- else:
- raw_req = requests.post(API_URL, headers=header, data=data)
-
- req = raw_req.json()
-
- avail_sites = req["data"]["picture"]["list"]["info"]["total_pages"]
- total_items = req["data"]["picture"]["list"]["info"]["total_items"]
-
- if page > avail_sites:
- print("")
- print(colored(f"No Further Sites Found.", "green"))
- sleep(3)
- break
-
- if req["data"]["picture"]["list"]["items"] == [] and page == 2:
- print("An error occured! Please report this with the link you used.")
- sleep(5)
- break
-
- # Download Each file
- with alive_bar(total_items, calibrate=1, dual_line=True, title='Downloading') as bar:
- for item in req["data"]["picture"]["list"]["items"]:
-
- image_id = item["id"]
- image_title = item["title"]
- image_address = item["url_to_original"]
- image_format = image_address.rpartition(".")
- bar.text = f'-> Downloading: {image_title}, please wait...'
-
- if user_proxies == True:
- proxy = random.choice(proxy_list)
- img_data = requests.get(image_address, proxies=proxy).content
- else:
- sleep(1)
- img_data = requests.get(image_address).content
-
- safe_title = title.replace(" ", "_")
- for char in unsafe_chars:
- safe_title = safe_title.replace(char, "")
-
- safe_image_title = image_title.replace(" ", "_")
- for char in unsafe_chars:
- safe_image_title = safe_image_title.replace(char, "")
-
- if not os.path.exists(f"media/{safe_title}"):
- os.mkdir(f"media/{safe_title}")
- with open(f"media/{safe_title}/{str(safe_image_title)}.{image_format[2]}", 'wb') as handler:
- handler.write(img_data)
- bar()
-
- print(colored(f"Page {page} Completed", "green"))
- page += 1
- sleep(5)
-
- return {"status": "ok"}
-
- except Exception as e:
+import requests
+import random
+from termcolor import colored
+from time import sleep
+from alive_progress import alive_bar
+import os
+import json
+
+from main import unsafe_chars
+from main import version
+
+class Luscious():
+ def Fetcher(proxy_list, user_proxies, header, URL):
+ try:
+ # sort link for category
+ parts = URL.split("/")
+ if parts[3] == "pictures":
+ title = parts[5].partition("_")
+ id = parts[5].rpartition("_")
+ elif parts[3] in ["album", "albums"]:
+ title = parts[4].partition("_")
+ id = parts[4].rpartition("_")
+ else:
+ print("An error occured! Please report this with the link you used.")
+ sleep(5)
+ return
+ id = id[2]
+ title = title[0]
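+            # e.g. ".../albums/bifurcation-ongoing_437722": rpartition("_")[2] yields the numeric album id, partition("_")[0] the title slug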
+
+ page = 1
+ while True:
+ header = {"User-Agent":f"nn-downloader/{version} (by Official Husko on GitHub)", "Content-Type": "application/json", "Accept": "application/json"}
+ data = {"id":"6","operationName":"PictureListInsideAlbum","query":"\n query PictureListInsideAlbum($input: PictureListInput!) {\n picture {\n list(input: $input) {\n info {\n ...FacetCollectionInfo\n }\n items {\n __typename\n id\n title\n description\n created\n like_status\n number_of_comments\n number_of_favorites\n moderation_status\n width\n height\n resolution\n aspect_ratio\n url_to_original\n url_to_video\n is_animated\n position\n permissions\n url\n tags {\n category\n text\n url\n }\n thumbnails {\n width\n height\n size\n url\n }\n }\n }\n }\n}\n \n fragment FacetCollectionInfo on FacetCollectionInfo {\n page\n has_next_page\n has_previous_page\n total_items\n total_pages\n items_per_page\n url_complete\n}\n ","variables":{"input":{"filters":[{"name":"album_id","value":id}],"display":"position","items_per_page":50,"page":page}}}
+ data = json.dumps(data)
+ API_URL = "https://members.luscious.net/graphql/nobatch/?operationName=PictureListInsideAlbum"
+ if user_proxies == True:
+ proxy = random.choice(proxy_list)
+ raw_req = requests.post(API_URL, headers=header, proxies=proxy, data=data)
+ else:
+ raw_req = requests.post(API_URL, headers=header, data=data)
+
+ req = raw_req.json()
+
+ avail_sites = req["data"]["picture"]["list"]["info"]["total_pages"]
+ total_items = req["data"]["picture"]["list"]["info"]["total_items"]
+
+ if page > avail_sites:
+ print("")
+ print(colored(f"No Further Sites Found.", "green"))
+ sleep(3)
+ break
+
+ if req["data"]["picture"]["list"]["items"] == [] and page == 2:
+ print("An error occured! Please report this with the link you used.")
+ sleep(5)
+ break
+
+ # Download Each file
+ with alive_bar(total_items, calibrate=1, dual_line=True, title='Downloading') as bar:
+ for item in req["data"]["picture"]["list"]["items"]:
+
+ image_id = item["id"]
+ image_title = item["title"]
+ image_address = item["url_to_original"]
+ image_format = image_address.rpartition(".")
+ bar.text = f'-> Downloading: {image_title}, please wait...'
+
+ if user_proxies == True:
+ proxy = random.choice(proxy_list)
+ img_data = requests.get(image_address, proxies=proxy).content
+ else:
+ sleep(1)
+ img_data = requests.get(image_address).content
+
+ safe_title = title.replace(" ", "_")
+ for char in unsafe_chars:
+ safe_title = safe_title.replace(char, "")
+
+ safe_image_title = image_title.replace(" ", "_")
+ for char in unsafe_chars:
+ safe_image_title = safe_image_title.replace(char, "")
+
+ if not os.path.exists(f"media/{safe_title}"):
+ os.mkdir(f"media/{safe_title}")
+ with open(f"media/{safe_title}/{str(safe_image_title)}.{image_format[2]}", 'wb') as handler:
+ handler.write(img_data)
+ bar()
+
+ print(colored(f"Page {page} Completed", "green"))
+ page += 1
+ sleep(5)
+
+ return {"status": "ok"}
+
+ except Exception as e:
return {"status": "error", "uinput": URL, "exception": str(e), "extra": raw_req.content}
\ No newline at end of file
diff --git a/modules/multporn.py b/modules/multporn.py
index 978d0af..4e6320e 100644
--- a/modules/multporn.py
+++ b/modules/multporn.py
@@ -1,123 +1,123 @@
-import requests
-import random
-import re
-import xmltodict
-from termcolor import colored
-from time import sleep
-from alive_progress import alive_bar
-import os
-
-from main import unsafe_chars
-
-class Multporn():
- def Fetcher(proxy_list, user_proxies, header, URL):
- try:
- media = []
- progress = 0
-
- # sort link for category
- parts = URL.split("/")
- type = parts[3]
- title = parts[4]
-
- if type in ["comics", "hentai_manga", "gay_porn_comics", "gif", "humor"]:
- type = "field_com_pages"
-
- elif type in ["pictures", "hentai"]:
- type = "field_img"
-
- elif type == "rule_63":
- type = "field_rule_63_img"
-
- elif type == "games":
- type = "field_screenshots"
-
- elif type == "video":
- print("[ " + colored("i","blue") + " ] " + "Sorry but videos are currently not supported.")
- sleep(5)
- return
-
- else:
- print("[ " + colored("i","blue") + " ] " + "Sorry but this type is not recognized. Please open a ticket with the link.")
- sleep(5)
- return
-
- # fetch item id
- if user_proxies == True:
- proxy = random.choice(proxy_list)
- raw_req = requests.get(URL, headers=header, proxies=proxy)
- else:
- raw_req = requests.get(URL, headers=header)
-
- # extract item id
- try:
- if raw_req.headers.get("link", None) is not None:
- raw_link = raw_req.headers.get("link")
-
- else:
- pattern = r''
-                    # Search for the pattern in the HTML content
-                    match = re.search(pattern, raw_req.text)
-
-                    if match:
-                        raw_link = match.group(1)
-
-            except Exception as e:
-                print("[ " + colored("-","red") + " ] " + f"Node Link not Found. Double check the link else report this. Error: {e}")
-                sleep(5)
-                return
-
-            link = re.findall("(http|https|ftp):[/]{2}([a-zA-Z0-9-.]+.[a-zA-Z]{2,4})(:[0-9]+)?/?([a-zA-Z0-9-._?,'/\+&%$#=~]*)", raw_link)
-            id = link[0][3]
-
-            # fetch juicebox with all images inside
-            FURL = f"https://multporn.net/juicebox/xml/field/node/{id}/{type}/full"
-            if user_proxies == True:
-                proxy = random.choice(proxy_list)
-                req = requests.get(FURL, headers=header, proxies=proxy)
-            else:
-                req = requests.get(FURL, headers=header)
-
-            # something really got fucked if it returns 404
-            if req.status_code == 404:
-                print(colored("An error occurred! please report this to the dev"))
-                sleep(3)
-                return
-
-            # convert the xml to json for the sake of my mental health
-            juicebox_data = xmltodict.parse(req.content)
-
-            # get all images into a list
-            for images in juicebox_data["juicebox"]["image"]:
-                image_url = images["@linkURL"]
-                media.append(image_url)
-
-            # Download all images
-            with alive_bar(len(media), calibrate=1, dual_line=True, title='Downloading') as bar:
-                bar.text = f'-> Downloading: {title}, please wait...'
-                for image in media:
-                    image_format = image.rpartition(".")
-                    progress += 1
-                    if user_proxies == True:
-                        proxy = random.choice(proxy_list)
-                        img_data = requests.get(image, proxies=proxy).content
-                    else:
-                        sleep(1)
-                        img_data = requests.get(image).content
-
-                    safe_title = title.replace(" ", "_")
-                    for char in unsafe_chars:
-                        safe_title = safe_title.replace(char, "")
-
-                    if not os.path.exists(f"media/{safe_title}"):
-                        os.mkdir(f"media/{safe_title}")
-                    with open(f"media/{safe_title}/{str(progress)}.{image_format[2]}", 'wb') as handler:
-                        handler.write(img_data)
-                    bar()
-            print("[ " + colored("i","blue") + " ] " + f"Completed downloading {title}!")
-            sleep(5)
-
-            return {"status": "ok"}
-
-        except Exception as e:
+import requests
+import random
+import re
+import xmltodict
+from termcolor import colored
+from time import sleep
+from alive_progress import alive_bar
+import os
+
+from main import unsafe_chars
+
+class Multporn():
+    def Fetcher(proxy_list, user_proxies, header, URL):
+        try:
+            media = []
+            progress = 0
+
+            # sort link for category
+            parts = URL.split("/")
+            type = parts[3]
+            title = parts[4]
+
+            if type in ["comics", "hentai_manga", "gay_porn_comics", "gif", "humor"]:
+                type = "field_com_pages"
+
+            elif type in ["pictures", "hentai"]:
+                type = "field_img"
+
+            elif type == "rule_63":
+                type = "field_rule_63_img"
+
+            elif type == "games":
+                type = "field_screenshots"
+
+            elif type == "video":
+                print("[ " + colored("i","blue") + " ] " + "Sorry, but videos are currently not supported.")
+                sleep(5)
+                return
+
+            else:
+                print("[ " + colored("i","blue") + " ] " + "Sorry, but this type is not recognized. Please open a ticket with the link.")
+                sleep(5)
+                return
+
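+            # The chain above maps the URL category to the Drupal field name used by
+            # the juicebox endpoint; a table-driven sketch (hypothetical constant,
+            # same mapping) would be:
+            #   FIELD_BY_CATEGORY = {"comics": "field_com_pages", "pictures": "field_img", ...}
+            #   type = FIELD_BY_CATEGORY.get(parts[3])
+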
+            # fetch item id
+            if user_proxies:
+                proxy = random.choice(proxy_list)
+                raw_req = requests.get(URL, headers=header, proxies=proxy)
+            else:
+                raw_req = requests.get(URL, headers=header)
+
+            # extract item id
+            try:
+                if raw_req.headers.get("link", None) is not None:
+                    raw_link = raw_req.headers.get("link")
+
+                else:
+                    # the Drupal shortlink tag in the page head carries the node URL
+                    pattern = r'<link rel="shortlink" href="(.*?)" />'
+                    # Search for the pattern in the HTML content
+                    match = re.search(pattern, raw_req.text)
+
+                    if match:
+                        raw_link = match.group(1)
+                    else:
+                        raise ValueError("shortlink tag not found in page HTML")
+
+            except Exception as e:
+                print("[ " + colored("-","red") + " ] " + f"Node link not found. Double-check the link, or report this issue. Error: {e}")
+                sleep(5)
+                return
+
+            link = re.findall(r"(http|https|ftp):[/]{2}([a-zA-Z0-9-.]+.[a-zA-Z]{2,4})(:[0-9]+)?/?([a-zA-Z0-9-._?,'/\+&%$#=~]*)", raw_link)
+            node_id = link[0][3]
+
+            # fetch juicebox with all images inside
+            FURL = f"https://multporn.net/juicebox/xml/field/node/{node_id}/{type}/full"
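+            # endpoint shape, as used above: /juicebox/xml/field/node/<node id>/<field>/full
+            # (served by the site's Drupal Juicebox module, as far as I can tell)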
+            if user_proxies:
+                proxy = random.choice(proxy_list)
+                req = requests.get(FURL, headers=header, proxies=proxy)
+            else:
+                req = requests.get(FURL, headers=header)
+
+            # something really got fucked if it returns 404
+            if req.status_code == 404:
+                print(colored("An error occurred! Please report this to the dev.", "red"))
+                sleep(3)
+                return
+
+            # convert the xml to json for the sake of my mental health
+            # (force_list so a one-image gallery still parses as a list)
+            juicebox_data = xmltodict.parse(req.content, force_list=("image",))
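+            # parsed shape, inferred from the keys used below (not a documented schema):
+            #   {"juicebox": {"image": [{"@linkURL": "...", ...}, ...]}}
+            # xmltodict exposes XML attributes as "@"-prefixed keys, hence "@linkURL"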
+
+            # get all images into a list
+            for images in juicebox_data["juicebox"]["image"]:
+                image_url = images["@linkURL"]
+                media.append(image_url)
+
+            # Download all images
+            with alive_bar(len(media), calibrate=1, dual_line=True, title='Downloading') as bar:
+                bar.text = f'-> Downloading: {title}, please wait...'
+                for image in media:
+                    image_format = image.rpartition(".")
+                    progress += 1
+                    if user_proxies:
+                        proxy = random.choice(proxy_list)
+                        img_data = requests.get(image, proxies=proxy).content
+                    else:
+                        sleep(1)
+                        img_data = requests.get(image).content
+
+                    safe_title = title.replace(" ", "_")
+                    for char in unsafe_chars:
+                        safe_title = safe_title.replace(char, "")
+
+                    # makedirs also creates the parent "media" folder if it is missing
+                    os.makedirs(f"media/{safe_title}", exist_ok=True)
+                    with open(f"media/{safe_title}/{progress}.{image_format[2]}", 'wb') as handler:
+                        handler.write(img_data)
+                    bar()
+            print("[ " + colored("i","blue") + " ] " + f"Completed downloading {title}!")
+            sleep(5)
+
+            return {"status": "ok"}
+
+        except Exception as e:
             return {"status": "error", "uinput": URL, "exception": str(e), "extra": raw_req.content}
\ No newline at end of file
diff --git a/modules/pretty_print.py b/modules/pretty_print.py
index 22ad7e6..8f29e8f 100644
--- a/modules/pretty_print.py
+++ b/modules/pretty_print.py
@@ -1,7 +1,7 @@
-from termcolor import colored
-
-major_error = str(colored(f"[{colored('!!!', 'red')}]"))
-warning = str(colored(f"[{colored('!', 'yellow')}]"))
-error = str(colored(f"[{colored('!', 'red')}]"))
-info = str(colored(f"[{colored('i', 'light_blue')}]"))
+from termcolor import colored
+
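+# Colored log prefixes; intended usage (sketch): print(f"{info} Fetching page 1 ...")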
+major_error = f"[{colored('!!!', 'red')}]"
+warning = f"[{colored('!', 'yellow')}]"
+error = f"[{colored('!', 'red')}]"
+info = f"[{colored('i', 'light_blue')}]"
 ok = f"[{colored('+', 'green')}]"
\ No newline at end of file
diff --git a/modules/proxyScraper.py b/modules/proxyScraper.py
index 899ab1c..ff65fa3 100644
--- a/modules/proxyScraper.py
+++ b/modules/proxyScraper.py
@@ -1,25 +1,25 @@
-import requests
-
-proxy_source_list = [
- "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt",
- "https://raw.githubusercontent.com/ShiftyTR/Proxy-List/master/http.txt",
- "https://raw.githubusercontent.com/mmpx12/proxy-list/master/http.txt",
- "https://raw.githubusercontent.com/Volodichev/proxy-list/main/http.txt",
- "https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt",
- "https://raw.githubusercontent.com/roma8ok/proxy-list/main/proxy-list-http.txt"
-]
-
-# scrape proxies from a given destination
-class ProxyScraper():
-    def Scraper(proxy_list):
-        for source in proxy_source_list:
-            response = requests.get(source,headers={"User-Agent":"nn-downloader/1.0 (by Official Husko on GitHub)"},timeout=10)
-            proxy_raw = response.text
-            split_proxies = proxy_raw.split()
-            for proxy in split_proxies:
-                if proxy in proxy_list:
-                    break
-                else:
-                    proxyy = {"http": proxy}
-                    proxy_list.append(proxyy)
+import requests
+
+proxy_source_list = [
+ "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt",
+ "https://raw.githubusercontent.com/ShiftyTR/Proxy-List/master/http.txt",
+ "https://raw.githubusercontent.com/mmpx12/proxy-list/master/http.txt",
+ "https://raw.githubusercontent.com/Volodichev/proxy-list/main/http.txt",
+ "https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt",
+ "https://raw.githubusercontent.com/roma8ok/proxy-list/main/proxy-list-http.txt"
+]
+
+# scrape proxies from a given destination
+class ProxyScraper():
+    def Scraper(proxy_list):
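+        # Usage sketch (hypothetical call site; the sources above must be reachable):
+        #   proxies = ProxyScraper.Scraper([])
+        #   requests.get(url, proxies=random.choice(proxies))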
+        for source in proxy_source_list:
+            response = requests.get(source, headers={"User-Agent": "nn-downloader/1.0 (by Official Husko on GitHub)"}, timeout=10)
+            proxy_raw = response.text
+            split_proxies = proxy_raw.split()
+            for proxy in split_proxies:
+                # the source lists are bare ip:port entries, so prepend the scheme
+                # requests expects; skip duplicates instead of aborting the source
+                proxy_entry = {"http": f"http://{proxy}"}
+                if proxy_entry not in proxy_list:
+                    proxy_list.append(proxy_entry)
         return proxy_list
\ No newline at end of file
diff --git a/modules/rule34.py b/modules/rule34.py
index a49c772..5657a2d 100644
--- a/modules/rule34.py
+++ b/modules/rule34.py
@@ -1,83 +1,83 @@
-import requests
-import random
-from termcolor import colored
-from alive_progress import alive_bar
-from time import sleep
-from datetime import datetime
-import os
-
-from main import unsafe_chars
-
-now = datetime.now()
-dt_now = now.strftime("%d-%m-%Y_%H-%M-%S")
-
-class RULE34():
-    @staticmethod
-    def fetcher(user_tags, user_blacklist, proxy_list, max_sites, user_proxies, header, db):
-        try:
-            approved_list = []
-            page = 1
-
-            while True:
-                URL = f"https://api.rule34.xxx/index.php?page=dapi&s=post&q=index&pid={page}&limit=1000&json=1&tags={user_tags}"
-                proxy = random.choice(proxy_list) if user_proxies else None
-                raw_req = requests.get(URL, headers=header, proxies=proxy)
-                req = raw_req.json()
-
-                if not req:
-                    print(colored("No images found or all downloaded! Try different tags.", "yellow"))
-                    sleep(5)
-                    break
-                elif page == max_sites:
-                    print(colored(f"Finished Downloading {max_sites} of {max_sites} pages.", "yellow"))
-                    sleep(5)
-                    break
-                else:
-                    for item in req:
-                        post_tags = str.split(item["tags"])
-                        if any(tag in user_blacklist for tag in post_tags):
-                            continue # Skip image if any blacklisted tag is found
-
-                        image_address = item["file_url"]
-                        image_name = item["image"]
-                        image_id = item["id"]
-
-                        if db is False or str(image_id) not in db:
-                            image_data = {"image_address": image_address, "image_name": image_name, "image_id": image_id}
-                            approved_list.append(image_data)
-
-                    with alive_bar(len(approved_list), calibrate=1, dual_line=True, title='Downloading') as bar:
-                        for data in approved_list:
-                            image_address = data["image_address"]
-                            image_name = data["image_name"]
-                            image_id = data["image_id"]
-                            image_format = image_address.rpartition(".")
-                            bar.text = f'-> Downloading: {image_id}, please wait...'
-
-                            proxy = random.choice(proxy_list) if user_proxies else None
-                            img_data = requests.get(image_address, proxies=proxy).content if user_proxies else requests.get(image_address).content
-
-                            safe_user_tags = user_tags.replace(" ", "_")
-                            for char in unsafe_chars:
-                                safe_user_tags = safe_user_tags.replace(char, "")
-
-                            directory = f"media/{dt_now}_{safe_user_tags}"
-                            os.makedirs(directory, exist_ok=True)
-
-                            with open(f"{directory}/{str(image_id)}.{image_format[-1]}", 'wb') as handler:
-                                handler.write(img_data)
-
-                            if db != False:
-                                with open("db/rule34.db", "a") as db_writer:
-                                    db_writer.write(f"{str(image_id)}\n")
-
-                            bar()
-
-                print(colored(f"Page {page} Completed", "green"))
-                approved_list.clear()
-                page += 1
-
-            return {"status": "ok"}
-
-        except Exception as e:
-            return {"status": "error", "uinput": user_tags, "exception": str(e), "extra": raw_req.content}
+import requests
+import random
+from termcolor import colored
+from alive_progress import alive_bar
+from time import sleep
+from datetime import datetime
+import os
+
+from main import unsafe_chars
+
+now = datetime.now()
+dt_now = now.strftime("%d-%m-%Y_%H-%M-%S")
+
+class RULE34():
+    @staticmethod
+    def fetcher(user_tags, user_blacklist, proxy_list, max_sites, user_proxies, header, db):
+        try:
+            approved_list = []
+            page = 1
+
+            while True:
+                URL = f"https://api.rule34.xxx/index.php?page=dapi&s=post&q=index&pid={page}&limit=1000&json=1&tags={user_tags}"
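+                # dapi parameters: pid is the page index (zero-based in Gelbooru-style
+                # APIs, so starting at page = 1 may skip the first batch), limit caps
+                # posts per page, and json=1 requests a JSON response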
+                proxy = random.choice(proxy_list) if user_proxies else None
+                raw_req = requests.get(URL, headers=header, proxies=proxy)
+                req = raw_req.json()
+
+                if not req:
+                    print(colored("No images found or all downloaded! Try different tags.", "yellow"))
+                    sleep(5)
+                    break
+                elif page == max_sites:
+                    print(colored(f"Finished Downloading {max_sites} of {max_sites} pages.", "yellow"))
+                    sleep(5)
+                    break
+                else:
+                    for item in req:
+                        post_tags = item["tags"].split()
+                        if any(tag in user_blacklist for tag in post_tags):
+                            continue  # Skip image if any blacklisted tag is found
+
+                        image_address = item["file_url"]
+                        image_name = item["image"]
+                        image_id = item["id"]
+
+                        if db is False or str(image_id) not in db:
+                            image_data = {"image_address": image_address, "image_name": image_name, "image_id": image_id}
+                            approved_list.append(image_data)
+
+                    with alive_bar(len(approved_list), calibrate=1, dual_line=True, title='Downloading') as bar:
+                        for data in approved_list:
+                            image_address = data["image_address"]
+                            image_name = data["image_name"]
+                            image_id = data["image_id"]
+                            image_format = image_address.rpartition(".")
+                            bar.text = f'-> Downloading: {image_id}, please wait...'
+
+                            proxy = random.choice(proxy_list) if user_proxies else None
+                            img_data = requests.get(image_address, proxies=proxy).content if user_proxies else requests.get(image_address).content
+
+                            safe_user_tags = user_tags.replace(" ", "_")
+                            for char in unsafe_chars:
+                                safe_user_tags = safe_user_tags.replace(char, "")
+
+                            directory = f"media/{dt_now}_{safe_user_tags}"
+                            os.makedirs(directory, exist_ok=True)
+
+                            with open(f"{directory}/{image_id}.{image_format[-1]}", 'wb') as handler:
+                                handler.write(img_data)
+
+                            if db is not False:
+                                with open("db/rule34.db", "a") as db_writer:
+                                    db_writer.write(f"{image_id}\n")
+
+                            bar()
+
+                print(colored(f"Page {page} Completed", "green"))
+                approved_list.clear()
+                page += 1
+
+            return {"status": "ok"}
+
+        except Exception as e:
+            return {"status": "error", "uinput": user_tags, "exception": str(e), "extra": raw_req.content}
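+
+# Usage sketch (hypothetical values; db=False disables the downloaded-IDs check):
+#   RULE34.fetcher("wolf", [], [], 2, False, {"User-Agent": "nn-downloader/1.0"}, False)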
diff --git a/modules/yiffer.py b/modules/yiffer.py
index af339dd..27fe5d2 100644
--- a/modules/yiffer.py
+++ b/modules/yiffer.py
@@ -1,65 +1,65 @@
-import requests
-import random
-from termcolor import colored
-from time import sleep
-from alive_progress import alive_bar
-import os
-
-from main import unsafe_chars
-
-class Yiffer():
-    def Fetcher(proxy_list, user_proxies, header, URL):
-        try:
-            # link operations
-            URL = requests.utils.unquote(URL, encoding='utf-8', errors='replace')
-            parts = URL.split("/")
-            title = parts[3]
-
-            # Get item info
-            URL = f"https://yiffer.xyz/api/comics/{title}"
-            if user_proxies == True:
-                proxy = random.choice(proxy_list)
-                raw_req = requests.get(URL, headers=header, proxies=proxy)
-            else:
-                raw_req = requests.get(URL, headers=header)
-
-            req = raw_req.json()
-
-            pages = req["numberOfPages"]
-            page_range = pages + 1
-
-            # Download all images
-            with alive_bar(pages, calibrate=1, dual_line=True, title='Downloading') as bar:
-                bar.text = f'-> Downloading: {title}, please wait...'
-                progress = 0
-                for number in range(1,page_range):
-                    progress += 1
-                    if progress <= 9:
-                        URL = f"https://static.yiffer.xyz/comics/{title}/00{progress}.jpg"
-                    elif progress >= 10 and progress < 100:
-                        URL = f"https://static.yiffer.xyz/comics/{title}/0{progress}.jpg"
-                    else:
-                        URL = f"https://static.yiffer.xyz/comics/{title}/{progress}.jpg"
-                    if user_proxies == True:
-                        proxy = random.choice(proxy_list)
-                        img_data = requests.get(URL, proxies=proxy).content
-                    else:
-                        sleep(1)
-                        img_data = requests.get(URL).content
-
-                    safe_title = title.replace(" ", "_")
-                    for char in unsafe_chars:
-                        safe_title = safe_title.replace(char, "")
-
-                    if not os.path.exists(f"media/{safe_title}"):
-                        os.mkdir(f"media/{safe_title}")
-                    with open(f"media/{safe_title}/{str(number)}.jpg", "wb") as handler:
-                        handler.write(img_data)
-                    bar()
-            print("[ " + colored("i","blue") + " ] " + f"Completed downloading {title}!")
-            sleep(5)
-
-            return {"status": "ok"}
-
-        except Exception as e:
+import requests
+import random
+from termcolor import colored
+from time import sleep
+from alive_progress import alive_bar
+import os
+
+from main import unsafe_chars
+
+class Yiffer():
+    def Fetcher(proxy_list, user_proxies, header, URL):
+        try:
+            # link operations
+            URL = requests.utils.unquote(URL, encoding='utf-8', errors='replace')
+            parts = URL.split("/")
+            title = parts[3]
+
+            # Get item info
+            URL = f"https://yiffer.xyz/api/comics/{title}"
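+            # the comics endpoint returns JSON metadata including "numberOfPages",
+            # which drives the page loop below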
+            if user_proxies:
+                proxy = random.choice(proxy_list)
+                raw_req = requests.get(URL, headers=header, proxies=proxy)
+            else:
+                raw_req = requests.get(URL, headers=header)
+
+            req = raw_req.json()
+
+            pages = req["numberOfPages"]
+            page_range = pages + 1
+
+            # Download all images
+            with alive_bar(pages, calibrate=1, dual_line=True, title='Downloading') as bar:
+                bar.text = f'-> Downloading: {title}, please wait...'
+                for number in range(1, page_range):
+                    # zero-pad the page number to three digits for the static file name
+                    if number <= 9:
+                        URL = f"https://static.yiffer.xyz/comics/{title}/00{number}.jpg"
+                    elif number < 100:
+                        URL = f"https://static.yiffer.xyz/comics/{title}/0{number}.jpg"
+                    else:
+                        URL = f"https://static.yiffer.xyz/comics/{title}/{number}.jpg"
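+                    # (equivalent to: URL = f"https://static.yiffer.xyz/comics/{title}/{number:03d}.jpg";
+                    # note that every page is assumed to be a .jpg)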
+                    if user_proxies:
+                        proxy = random.choice(proxy_list)
+                        img_data = requests.get(URL, proxies=proxy).content
+                    else:
+                        sleep(1)
+                        img_data = requests.get(URL).content
+
+                    safe_title = title.replace(" ", "_")
+                    for char in unsafe_chars:
+                        safe_title = safe_title.replace(char, "")
+
+                    # makedirs also creates the parent "media" folder if it is missing
+                    os.makedirs(f"media/{safe_title}", exist_ok=True)
+                    with open(f"media/{safe_title}/{number}.jpg", "wb") as handler:
+                        handler.write(img_data)
+                    bar()
+            print("[ " + colored("i","blue") + " ] " + f"Completed downloading {title}!")
+            sleep(5)
+
+            return {"status": "ok"}
+
+        except Exception as e:
             return {"status": "error", "uinput": URL, "exception": str(e), "extra": raw_req.content}
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 5492168..6b04389 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,18 +1,18 @@
-about-time==4.2.1
-alive-progress==3.1.4
-ansicon==1.89.0
-blessed==1.20.0
-certifi==2023.7.22
-charset-normalizer==3.3.0
-grapheme==0.6.0
-idna==3.7
-inquirer==3.1.3
-jinxed==1.2.0
-python-editor==1.0.4
-readchar==4.0.5
-requests==2.32.0
-six==1.16.0
-termcolor==2.3.0
-urllib3==2.0.7
-wcwidth==0.2.8
-xmltodict==0.13.0
+about-time==4.2.1
+alive-progress==3.1.4
+ansicon==1.89.0
+blessed==1.20.0
+certifi==2023.7.22
+charset-normalizer==3.3.0
+grapheme==0.6.0
+idna==3.7
+inquirer==3.1.3
+jinxed==1.2.0
+python-editor==1.0.4
+readchar==4.0.5
+requests==2.32.0
+six==1.16.0
+termcolor==2.3.0
+urllib3==2.2.2
+wcwidth==0.2.8
+xmltodict==0.13.0
\ No newline at end of file
diff --git a/run.bat b/run.bat
deleted file mode 100644
index 2a11aca..0000000
--- a/run.bat
+++ /dev/null
@@ -1 +0,0 @@
-conda activate ".\.env" && python main.py
\ No newline at end of file