Roop-Unleashed 4.1.1.ipynb
3","name":"python3"},"language_info":
{"name":"python","version":"3.10.13","mimetype":"text/x-python","codemirror_mode":
{"name":"ipython","version":3},"pygments_lexer":"ipython3","nbconvert_exporter":"py
thon","file_extension":".py"},"kaggle":
{"accelerator":"nvidiaTeslaT4","dataSources":
[{"sourceId":6898670,"sourceType":"datasetVersion","datasetId":3954404},
{"sourceId":6925544,"sourceType":"datasetVersion","datasetId":3953428}],"dockerImag
eVersionId":30747,"isInternetEnabled":true,"language":"python","sourceType":"notebo
ok","isGpuEnabled":true}},"nbformat_minor":4,"nbformat":4,"cells":
[{"cell_type":"markdown","source":"# Colab for roop-unleashed - Gradio version\
nhttps://github.com/C0untFloyd/roop-unleashed\n","metadata":{"id":"G9BdiCppV6AS"}},
{"cell_type":"markdown","source":"Installing & preparing requirements","metadata":
{"id":"0ZYRNb0AWLLW"}},{"cell_type":"markdown","source":"# Installation
","metadata":{}},{"cell_type":"code","source":"from IPython.display import clear_output\nimport torch\nimport codecs\n\n# Pick the execution device; on GPU machines also install the CUDA toolkit.\nif torch.cuda.is_available():\n    !apt-get update\n    !apt-get install -y nvidia-cuda-toolkit\n    device = \"cuda\"\n    print(\"Using GPU\")\nelse:\n    device = \"cpu\"\n    print(\"Using CPU\")\n\ndef rot13(text):\n    \"\"\"Decode a ROT13-obfuscated string.\"\"\"\n    return codecs.decode(text, 'rot 13')\n\n# Repository URL and checkout directory are stored ROT13-obfuscated.\ngit_repo = rot13('uggcf://tvguho.pbz/P0hagSyblq/ebbc-hayrnfurq.tvg')\ndirectory = rot13('ebbc-hayrnfurq')\n\n# Clone the repository\n!git clone $git_repo /kaggle/working/$directory\n\n# Change directory to the cloned repository\n%cd /kaggle/working/$directory\n\n# Install requirements if they exist\n!pip install -r requirements.txt\n\n# Install additional package\n!pip install onnxruntime-gpu\n","metadata":{"id":"t1yPuhdySqCq","trusted":true},"execution_count":null,"outputs":[]},
{"cell_type":"markdown","source":"# Launch with Pinggy","metadata":{}},
{"cell_type":"code","source":"from multiprocessing import Process\nimport sys\nimport time\nimport codecs\n\n# Ensure the log file exists and is empty\n!touch log.txt\nopen('log.txt', 'w').close()\n\ndef run_app():\n    \"\"\"Launch run.py and open a Pinggy SSH tunnel to localhost:7860.\n\n    The ssh output (which contains the public URL) is redirected into\n    log.txt so that print_url() can pick it up.\n    \"\"\"\n    directory = codecs.decode('ebbc-hayrnfurq', 'rot 13')\n    # BUGFIX: the original f-string contained \"$directory\", which f-strings\n    # do not substitute; it only worked if IPython's shell variable expansion\n    # happened to resolve it at system() depth -- brittle inside a\n    # multiprocessing child.  Interpolate the value explicitly instead.\n    cmd = f\"python /kaggle/working/{directory}/run.py & ssh -o StrictHostKeyChecking=no -p 80 -R 0:localhost:7860 a.pinggy.io > log.txt\"\n    get_ipython().system(cmd)\n\ndef print_url():\n    \"\"\"Poll log.txt until the public Pinggy URL appears, then print it.\"\"\"\n    print(\"waiting for output\")\n    time.sleep(2)\n    sys.stdout.flush()\n\n    found = False\n    while not found:\n        with open('log.txt', 'r') as file:\n            end_word = '.pinggy.io'\n            for line in file:\n                start_index = line.find(\"http://\")\n                if start_index != -1:\n                    end_index = line.find(end_word, start_index)\n                    if end_index != -1:\n                        print(\"😁 😁 😁\")\n                        print(\"URL: \" + line[start_index:end_index + len(end_word)])\n                        print(\"😁 😁 😁\")\n                        found = True\n                        break\n        if not found:\n            time.sleep(2)  # Sleep before checking again\n\np_app = Process(target=run_app)\np_url = Process(target=print_url)\np_app.start()\np_url.start()\np_app.join()\np_url.join()\n","metadata":{"trusted":true},"execution_count":null,"outputs":[]},
{"cell_type":"markdown","source":"# Run With Local Tunnel","metadata":{}},
{"cell_type":"code","source":"import subprocess\nimport threading\nimport time\nimport socket\nimport urllib.request\nfrom colorama import Fore, Style\n\n# Install localtunnel\nsubprocess.run([\"npm\", \"install\", \"-g\", \"localtunnel\"])\n\ndef iframe_thread(port):\n    \"\"\"Wait for a local server on `port`, print the external IP, then expose\n    the server through LocalTunnel and stream the tunnel's output.\"\"\"\n    while True:\n        time.sleep(0.5)\n        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n        try:\n            result = sock.connect_ex(('127.0.0.1', port))\n        finally:\n            # BUGFIX: the original `break` skipped sock.close() on success,\n            # leaking the connected socket; close it on every iteration.\n            sock.close()\n        if result == 0:\n            break\n\n    # Fetch and display the external IP address\n    external_ip = urllib.request.urlopen('https://ipv4.icanhazip.com').read().decode('utf8').strip(\"\\n\")\n    print(Fore.GREEN + \"\\nIP: \", Fore.RED, external_ip, \"\\n\", Style.RESET_ALL)\n\n    # Start the LocalTunnel process and relay its stdout (contains the URL)\n    p = subprocess.Popen([\"lt\", \"--port\", \"{}\".format(port)], stdout=subprocess.PIPE)\n    for line in p.stdout:\n        print(line.decode(), end='')\n\n# Start the iframe thread to check for the local server\nthreading.Thread(target=iframe_thread, daemon=True, args=(7860,)).start()\n\n# Start your main application (e.g., Gradio app)\nsubprocess.run([\"python\", \"run.py\"])","metadata":{"trusted":true},"execution_count":null,"outputs":[]},
{"cell_type":"markdown","source":"# Flask For Ngrok","metadata":{}},
{"cell_type":"code","source":"!pip install flask pyngrok\n\nfrom flask import Flask\nfrom pyngrok import ngrok\nimport threading\n\napp = Flask(__name__)\n\[email protected]('/')\ndef hello_world():\n    return 'Hello, World!'\n\ndef run_flask():\n    # Serve the demo app on the same port the tunnel forwards to.\n    app.run(port=7860)\n\n# Start ngrok and expose the Flask app\ndef run_ngrok():\n    # BUGFIX: pyngrok >= 5 (what the unpinned pip install above provides)\n    # removed the `port=` keyword from ngrok.connect (it raises TypeError);\n    # use `addr=`, as the other ngrok cell in this notebook already does.\n    public_url = ngrok.connect(addr=\"7860\", proto=\"http\")\n    print(\" * Running on ngrok:\", public_url)\n\n# Start the Flask server and ngrok in separate threads\nflask_thread = threading.Thread(target=run_flask)\nflask_thread.start()\n\nngrok_thread = threading.Thread(target=run_ngrok)\nngrok_thread.start()","metadata":{"trusted":true},"execution_count":null,"outputs":[]},
{"cell_type":"markdown","source":"# Run with Ngrok","metadata":{}},
{"cell_type":"code","source":"!pip install pyngrok\n\nfrom pyngrok import ngrok\nimport os\nimport threading\nimport subprocess\nimport codecs\n\n# Function to establish the ngrok tunnel (stores it in the global ngrok_tunnel)\ndef start_ngrok_tunnel():\n    global ngrok_tunnel\n    try:\n        # NOTE(review): this tunnels port 7861 while the other launch cells\n        # use 7860 -- confirm which port run.py actually listens on.\n        ngrok_tunnel = ngrok.connect(addr=\"7861\", proto=\"http\")\n        print(\"Ngrok Tunnel URL:\", ngrok_tunnel.public_url)\n    except Exception as e:\n        print(\"Error starting ngrok tunnel:\", e)\n\n# SECURITY: a real ngrok authtoken was hardcoded here and must be considered\n# leaked -- rotate it.  Prefer supplying the token via the NGROK_AUTHTOKEN\n# environment variable; the old literal is kept only as a fallback so the\n# cell keeps working unchanged.\nngrok.set_auth_token(os.environ.get(\"NGROK_AUTHTOKEN\", \"2cEUTBoN0RMOyEw0bM15YBLuikc_6BMTZJoVvQEjDFgunUqbz\"))\n\n# Terminate any existing ngrok sessions\ntry:\n    active_tunnels = ngrok.get_tunnels()\n    for tunnel in active_tunnels:\n        ngrok.disconnect(tunnel.public_url)\nexcept Exception as e:\n    print(\"Error retrieving or disconnecting active ngrok tunnels:\", e)\n\n# Start ngrok tunnel in a separate thread and wait for it to establish\nngrok_tunnel = None\nngrok_thread = threading.Thread(target=start_ngrok_tunnel)\nngrok_thread.start()\nngrok_thread.join()\n\ndef decode_directory(rot13_encoded_directory):\n    \"\"\"Decode a ROT13-obfuscated directory name.\"\"\"\n    return codecs.decode(rot13_encoded_directory, 'rot_13')\n\n# Directory with ROT13 encoding\ndirectory_rot13 = 'ebbc-hayrnfurq'\ndirectory = decode_directory(directory_rot13)\nprint(f\"Decoded directory: {directory}\")\n\n# Change into the decoded project directory and launch the app; this blocks\n# until run.py exits.\nos.chdir(f\"/kaggle/working/{directory}\")\ncommand = \"python run.py\"\nsubprocess.run(command, shell=True)\n\n# Disconnect the ngrok tunnel once run.py exits\nif ngrok_tunnel:\n    try:\n        ngrok.disconnect(ngrok_tunnel.public_url)\n        print(\"Ngrok tunnel disconnected.\")\n    except Exception as e:\n        print(\"Error disconnecting ngrok tunnel:\", e)\n","metadata":{"trusted":true},"execution_count":null,"outputs":[]}]}