update
parent
72bd5f9ba7
commit
9516890416
@ -1,160 +1,4 @@
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
#pdm.lock
|
||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||
# in version control.
|
||||
# https://pdm.fming.dev/#use-with-ide
|
||||
.pdm.toml
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
||||
# Content
|
||||
storysaver/
|
||||
facebook/
|
||||
media/
|
||||
@ -0,0 +1,17 @@
|
||||
you have not failed enough
|
||||
you care what other people think
|
||||
you think youre smarter than everyone
|
||||
you lack curiosity
|
||||
you dont ask enough questions
|
||||
you cant handle the truth
|
||||
you dont see opportunities
|
||||
|
||||
resell to the people you already sold
|
||||
staff either save you time or make you money
|
||||
sell on people's weaknesses, insecurities and addictions
|
||||
make people think they NEED your product
|
||||
any business can be started with no money
|
||||
business is money IN and not money OUT
|
||||
take money, not make money
|
||||
use whatever you've got
|
||||
act with speed
|
||||
@ -0,0 +1,99 @@
|
||||
fix recorder
|
||||
recover missing streams
|
||||
re-encode all videos to 10 seconds
|
||||
scan last dood videos with missing download_link
|
||||
delete all "deleted" self-hosted videosdump all useless self-hosted videos to mix/dood
|
||||
fix camsrip crawler
|
||||
move camsrip to new server
|
||||
finish converting download_link faster
|
||||
check failed mixdrop uploads
|
||||
add cache for .ts files bunny/nvme
|
||||
manage all domains
|
||||
pay for onlycats domain
|
||||
onlyfans crawler
|
||||
telegram crawler
|
||||
optimize instagram crawler
|
||||
do ethernet cables
|
||||
get access to xn
|
||||
paint light switches/phone case/pc cases
|
||||
microscope shorts
|
||||
fix / sell scooter
|
||||
paperless ngx
|
||||
do hand scan
|
||||
go to psychiatrist
|
||||
do general checkup on body
|
||||
fix and brush teeth
|
||||
SFP and NTP
|
||||
phising ig
|
||||
xss tate shop
|
||||
finish and improve opsec
|
||||
delete internet t
|
||||
clean cry
|
||||
warm up pay
|
||||
install wasabi
|
||||
install / try gaming linux
|
||||
finish atrazat on
|
||||
set up nas
|
||||
dump last stories
|
||||
photoshop originals
|
||||
finish ab recoverer/cleaner
|
||||
fix controller
|
||||
fix hdd 100% load on video server
|
||||
replace exoclick
|
||||
fake comments bot
|
||||
advanced tags/streamer data bot
|
||||
self host all thumbs with bunny
|
||||
reupload all dmca'd videos with new id's
|
||||
generate shorts
|
||||
use user's tokens to record private shows
|
||||
create alert system
|
||||
set up streaming server
|
||||
minimize amount of scripts i need
|
||||
normalize database
|
||||
load balancers for web server
|
||||
set up recu downloader
|
||||
handle premium expired / purchases and upgrades
|
||||
create bunny-like database and api for videos
|
||||
save file sizes for videos
|
||||
add payment options like paypal, usdt and more
|
||||
re-generate thumbs for all videos self-hosted
|
||||
download all mixdrop/dood/xpo videos
|
||||
add streamate and cherrytv to recorder and website
|
||||
delete stripchat dupes
|
||||
delete "fav" dupes
|
||||
blacklist ruta and other dmca agencies's crawlers
|
||||
send emails to potential premiums
|
||||
fix streamers db having 2 queries with and wuthout gender
|
||||
create storage manager for recorder
|
||||
visualize nginx logs to track dmca bots
|
||||
append all cutoff streams
|
||||
add ssh keys
|
||||
|
||||
|
||||
frontend:
|
||||
add forums
|
||||
add width sections for video player
|
||||
coins/credit system (basically affiliate)
|
||||
enable user uploaded content
|
||||
performer accounts
|
||||
advanced search system
|
||||
affiliate system - optimize and create a panel where i can easily manage all
|
||||
sort by dmca and most popular on /profile
|
||||
change comments, follow and save to js
|
||||
add payment options
|
||||
optimize history/following
|
||||
create contests and affiliates for premium
|
||||
"copy" saved videos
|
||||
keep views uncached on main page
|
||||
add heatmap for player
|
||||
fix missing animated thumbs in saved page
|
||||
fix duplicates in saved videos page
|
||||
add ip logging for security
|
||||
require phone numbers for logging in?
|
||||
add recu affiliate?
|
||||
fix history dupes
|
||||
try node.js to get the mp4 url from mixdrop
|
||||
add profile pictures in search
|
||||
add collections
|
||||
mark premium videos
|
||||
add credit card payment with skrill or others
|
||||
@ -0,0 +1,62 @@
|
||||
import os
|
||||
import tarfile
|
||||
from datetime import datetime
|
||||
import sys # Import sys for command line arguments
|
||||
from BunnyCDN.Storage import Storage
|
||||
|
||||
def is_hidden(path):
    """Return True if *path* is hidden or sits inside a hidden directory.

    A path counts as hidden when its first character is a dot, or when any
    later component begins with a dot (detected via a '/.' substring, so
    POSIX-style separators are assumed).
    """
    if path.startswith('.'):
        return True
    return '/.' in path
|
||||
|
||||
def should_exclude(path, excluded_items):
    """Return True when *path* must be skipped.

    A path is skipped if it is hidden (see is_hidden) or if it starts
    with any of the prefixes listed in *excluded_items*.
    """
    return is_hidden(path) or any(
        path.startswith(prefix) for prefix in excluded_items
    )
|
||||
|
||||
def backup(folder_path, excluded_folders=None, excluded_files=None):
    """
    Create a gzip-compressed tar backup of *folder_path*.

    Hidden entries and any path starting with a prefix from the exclusion
    lists are skipped. The archive itself is written inside *folder_path*,
    so it is explicitly excluded from the walk — otherwise os.walk could
    pick up the partially-written archive and tar would try to add the
    file it is currently writing.

    :param folder_path: root folder to archive.
    :param excluded_folders: prefixes of directories to skip (default: none).
    :param excluded_files: prefixes of files to skip (default: none).
    :return: path of the created ``backup-<timestamp>.tar.gz`` file.
    """
    # Avoid the mutable-default-argument pitfall: fresh lists per call.
    excluded_folders = [] if excluded_folders is None else excluded_folders
    excluded_files = [] if excluded_files is None else excluded_files

    timestamp = int(datetime.timestamp(datetime.now()))
    backup_file = os.path.join(folder_path, f'backup-{timestamp}.tar.gz')

    with tarfile.open(backup_file, "w:gz") as tar:
        for root, dirs, file_names in os.walk(folder_path):
            if should_exclude(root, excluded_folders):
                continue

            for file_name in file_names:
                file_path = os.path.join(root, file_name)

                # Never add the archive we are currently writing.
                if file_path == backup_file:
                    continue

                if should_exclude(file_path, excluded_files):
                    continue

                print("Adding %s" % file_path)
                # arcname keeps paths relative so the archive extracts cleanly.
                tar.add(file_path, arcname=os.path.relpath(file_path, start=folder_path))

    return backup_file
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point: archive one folder and upload the result to
    # BunnyCDN storage.  Usage: python script.py <folder_path>
    if len(sys.argv) != 2:
        print("Usage: python script.py <folder_path>")
        sys.exit(1)

    folder_path = sys.argv[1]

    # Fail fast on a bad path instead of letting backup() raise later.
    if not os.path.isdir(folder_path):
        print(f"Error: The folder '{folder_path}' does not exist.")
        sys.exit(1)

    # Create the local tar.gz archive; backup() returns its path.
    backup_file = backup(folder_path)

    # NOTE(review): the storage access key is hard-coded and now exposed in
    # version control — rotate it and load it from an environment variable
    # or a config file instead.
    obj_storage = Storage('99f4c72b-2674-4e6a-a1825c269cc0-b959-48a1', 'ab-backups')
    obj_storage.PutFile(backup_file, f'backups/{os.path.basename(backup_file)}')
    print("Backup and upload successful.")
|
||||
@ -0,0 +1,10 @@
|
||||
ChallengeResolve: Unknown step_name "submit_phone" for "olivercury" in challenge resolver: {'step_name': 'submit_phone', 'step_data': {'phone_number': '+972522618221', 'show_whatsapp_otp_choice': True, 'whatsapp': False}, 'flow_render_type': 3, 'bloks_action': 'com.instagram.challenge.navigation.take_challenge', 'cni': 18436897147040850, 'challenge_context': 'Af6pVKkiomiOMxWvLzouGukazqMMhFbzNERezSMhBU-dHrO_DNGfTJpUPp8-di6HHm8WfAfL6_PQaLkV6sOkb6CC68ugfQtLMd3OgMVasZkOI5O6YdnoqMtBzNBGd944VtUNEEkl9bNVM5yQbfMskCuKTUf7AQOIYD2zEuvd8wC-AUBPziP105a1xq3GbaSeyJ9QnEJHHWgpFenBURUNbdLvQ9lzs5j62zCxo_0fe4Fw', 'challenge_type_enum_str': 'SMS', 'status': 'ok'}
|
||||
requests.exceptions.HTTPError: 401 Client Error: Unauthorized for url: https://www.instagram.com/graphql/query/?variables=%7B%22user_id%22%3A%226208321762%22%2C%22include_reel%22%3Atrue%7D&query_hash=ad99dd9d3646cc3c0dda65debcd266a7
|
||||
|
||||
During handling of the above exception, another exception occurred:
|
||||
|
||||
instagrapi.exceptions.ClientUnauthorizedError: 401 Client Error: Unauthorized for url: https://www.instagram.com/graphql/query/?variables=%7B%22user_id%22%3A%226208321762%22%2C%22include_reel%22%3Atrue%7D&query_hash=ad99dd9d3646cc3c0dda65debcd266a7
|
||||
|
||||
During handling of the above exception, another exception occurred:
|
||||
|
||||
requests.exceptions.HTTPError: 401 Client Error: Unauthorized for url: https://i.instagram.com/api/v1/users/6208321762/info/
|
||||
@ -0,0 +1,82 @@
|
||||
from BunnyCDN.Storage import Storage
|
||||
from datetime import datetime
|
||||
import os, config, funcs
|
||||
from PIL import Image
|
||||
|
||||
def dump_instagram(folder_path):
    """
    Process every file in *folder_path* and hand it to UploadMedia.

    File names are expected to look like
    ``<username>_<timestamp>_<userid>.<ext>`` — the username may itself
    contain underscores, so only the last two underscore-separated parts
    are treated as timestamp and user id.  Files whose names do not parse
    are reported and skipped.
    """
    for filename in os.listdir(folder_path):
        parts = filename.split('_')

        try:
            username = '_'.join(parts[:-2])  # Join all except last two
            timestamp = int(parts[-2])  # Second last is timestamp
            user_id = int(parts[-1].split('.')[0])  # Last part before extension is user_id
        except Exception as e:
            # Malformed name: report and move on to the next file.
            print(f"Invalid filename: (unknown). Error: {e}")
            continue

        filepath = os.path.join(folder_path, filename)

        # Classify the file (image/video) and the post type via helpers
        # from the local funcs module.
        mediatype = funcs.get_media_type(filename)
        post_type = funcs.determine_post_type(filepath, mediatype)

        UploadMedia(username=username, media_type=mediatype, filepath=filepath, post_type=post_type, timestamp=timestamp, user_id=user_id)
|
||||
|
||||
|
||||
def UploadMedia(filepath, username, media_type='image', post_type='story', timestamp=None, user_id=None):
    """
    Upload one media file to CDN storage and record it in the database.

    Relies on module-level globals created under ``__main__``:
    ``existing_files`` (known content hashes), ``obj_storage`` (CDN
    client) and ``newDB``/``newCursor`` (database handles).  Files whose
    hash is already known are deleted locally and skipped.  The local
    file is removed after a successful upload.  Returns True.
    """
    # NOTE(review): this branch is a no-op — looks like a leftover
    # debugging hook; confirm before removing.
    if 'tero' in username:
        pass

    filename = os.path.basename(filepath)
    file_extension = os.path.splitext(filename)[1].lower()

    # The content hash doubles as the remote file name and the dedupe key.
    file_hash = funcs.calculate_file_hash(filepath)

    # Only videos have a duration; images get 0.
    duration = funcs.get_video_duration(filepath) if media_type == 'video' else 0

    # Fall back to "now" when no timestamp was parsed from the filename.
    post_date = datetime.fromtimestamp(int(timestamp)) if timestamp else datetime.now()

    dirtype = funcs.determine_post_type(filepath, media_type)

    server_path = f'media/{dirtype}/{username}/{file_hash}{file_extension}'

    file_url = f"https://storysave.b-cdn.net/{server_path}"

    # Duplicate content: drop the local copy and report success.
    if file_hash in existing_files:
        print('Duplicate file detected. Removing...')
        os.remove(filepath)
        return True

    obj_storage.PutFile(filepath, server_path)

    # Pixel dimensions: PIL for images, helper probe for videos.
    if media_type == 'image':
        with Image.open(filepath) as img:
            width, height = img.size
    else:
        width, height = funcs.get_video_dimensions(filepath)

    # INSERT IGNORE keeps the insert idempotent if the row already exists.
    query = "INSERT IGNORE INTO media (username, media_type, media_url, width, height, post_type, date, user_id, hash, filename, duration) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
    values = (username, media_type, file_url, width, height, post_type, post_date, user_id, file_hash, filename, duration)

    newCursor.execute(query, values)
    newDB.commit()
    print(f'[{newCursor.rowcount}] records updated. File (unknown) uploaded to {file_url}')

    # Local copy is no longer needed once uploaded and recorded.
    os.remove(filepath)

    return True
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Script entry point: set up shared globals used by UploadMedia,
    # then process the pending-media folder.
    print('Starting processing...')

    # Database handles (helper from the local config module).
    newDB, newCursor = config.gen_connection()

    # NOTE(review): the storage access key is hard-coded and exposed in
    # version control — rotate it and load it from the environment instead.
    obj_storage = Storage('345697f9-d9aa-4a6b-a5ec8bffc16d-ceaf-453e', 'storysave')

    # Preload known content hashes so UploadMedia can skip duplicates.
    newCursor.execute("SELECT hash FROM media WHERE platform='instagram' AND hash IS NOT NULL")
    existing_files = [image[0] for image in newCursor.fetchall()]

    dump_instagram('storysaver/missingdata/')

    print("Processing completed.")
|
||||
@ -0,0 +1,19 @@
|
||||
import os, config, funcs
|
||||
|
||||
if __name__ == '__main__':
    # Standalone dedupe pass: move files whose content hash is already in
    # the database out of the pending folder.
    print('Starting processing...')

    # Database handles (helper from the local config module).
    newDB, newCursor = config.gen_connection()

    # Hashes of media already recorded in the database.
    newCursor.execute("SELECT hash FROM media WHERE platform='instagram' AND hash IS NOT NULL")
    existing_files = [image[0] for image in newCursor.fetchall()]

    files = os.listdir('storysaver/missingdata/')

    for file in files:
        filePath = os.path.join('storysaver/missingdata/', file)
        file_hash = funcs.calculate_file_hash(filePath)

        if file_hash in existing_files:
            # Despite the message, duplicates are moved aside, not deleted.
            # NOTE(review): os.rename raises if 'storysaver/dupes/' does not
            # exist — confirm the directory is created beforehand.
            print(f'Duplicate file detected. Removing {filePath}...')
            os.rename(filePath, f'storysaver/dupes/{file}')
|
||||
Loading…
Reference in New Issue