{ "cells": [ { "cell_type": "markdown", "source": [ "# Plug and Play" ], "metadata": {} }, { "cell_type": "code", "execution_count": 3, "source": [ "!apt-get update && apt-get install git-lfs\n", "!git-lfs clone https://huggingface.co/spaces/hysts/PnP-diffusion-features\n", "%cd PnP-diffusion-features\n", "!pip install -r requirements.txt\n", "!git clone https://github.com/MichalGeyer/plug-and-play\n", "!pip install clip taming-transformers-rom1504 \n", "!pip install git+https://github.com/CompVis/taming-transformers\n", "!pip install -U transformers" ], "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "/notebooks/PnP-diffusion-features\n" ] } ], "metadata": { "execution": { "iopub.execute_input": "2023-03-03T01:17:25.956120Z", "iopub.status.busy": "2023-03-03T01:17:25.955418Z", "iopub.status.idle": "2023-03-03T01:17:25.961046Z", "shell.execute_reply": "2023-03-03T01:17:25.960498Z", "shell.execute_reply.started": "2023-03-03T01:17:25.956089Z" } } }, { "cell_type": "code", "execution_count": 6, "source": [ "import sys\n", "import fileinput\n", "\n", "# with is like your try .. finally block in this case\n", "with open('app.py', 'r') as file:\n", " # read a list of lines into data\n", " data = file.readlines()\n", "\n", "# now change the 2nd line, note that you have to add a newline\n", "data[-1] = '\\ndemo.launch(share=True)'\n", "\n", "# and write everything back\n", "with open('app.py', 'w') as file:\n", " file.writelines( data )\n", "\n", " \n", "\n" ], "outputs": [], "metadata": { "execution": { "iopub.execute_input": "2023-03-03T01:25:04.928147Z", "iopub.status.busy": "2023-03-03T01:25:04.927479Z", "iopub.status.idle": "2023-03-03T01:25:04.933165Z", "shell.execute_reply": "2023-03-03T01:25:04.932581Z", "shell.execute_reply.started": "2023-03-03T01:25:04.928116Z" } } }, { "cell_type": "code", "execution_count": null, "source": [ "!python app.py --share " ], "outputs": [], "metadata": { "execution": { "iopub.execute_input": "2023-03-03T01:25:37.986838Z", "iopub.status.busy": "2023-03-03T01:25:37.986259Z", "iopub.status.idle": "2023-03-03T01:30:55.551780Z", "shell.execute_reply": "2023-03-03T01:30:55.551040Z", "shell.execute_reply.started": "2023-03-03T01:25:37.986811Z" } } }, { "cell_type": "markdown", "source": [ "# Self Attention Guidance" ], "metadata": {} }, { "cell_type": "code", "execution_count": null, "source": [ "!apt-get update && apt-get install git-lfs\n", "!git-lfs clone https://huggingface.co/spaces/susunghong/Self-Attention-Guidance\n", "%cd Self-Attention-Guidance\n", "!pip install -r requirements.txt\n", "!pip install -U diffusers\n", "!pip install gradio\n", "!pip install -U transformers " ], "outputs": [], "metadata": { "execution": { "iopub.execute_input": "2023-03-03T01:33:39.538102Z", "iopub.status.busy": "2023-03-03T01:33:39.537496Z", "iopub.status.idle": "2023-03-03T01:33:42.779249Z", "shell.execute_reply": "2023-03-03T01:33:42.778304Z", "shell.execute_reply.started": "2023-03-03T01:33:39.538077Z" } } }, { "cell_type": "code", "execution_count": 12, "source": [ "import sys\n", "import fileinput\n", "\n", "# with is like your try .. finally block in this case\n", "with open('app.py', 'r') as file:\n", " # read a list of lines into data\n", " data = file.readlines()\n", "\n", "# now change the 2nd line, note that you have to add a newline\n", "data[-5] = '\\n demo.launch(share=True)'\n", "\n", "# and write everything back\n", "with open('app.py', 'w') as file:\n", " file.writelines( data )\n" ], "outputs": [], "metadata": { "execution": { "iopub.execute_input": "2023-03-03T01:32:54.407719Z", "iopub.status.busy": "2023-03-03T01:32:54.407409Z", "iopub.status.idle": "2023-03-03T01:32:54.417801Z", "shell.execute_reply": "2023-03-03T01:32:54.416842Z", "shell.execute_reply.started": "2023-03-03T01:32:54.407694Z" } } }, { "cell_type": "code", "execution_count": null, "source": [ "!python app.py --share" ], "outputs": [], "metadata": { "execution": { "iopub.execute_input": "2023-03-03T01:33:44.067013Z", "iopub.status.busy": "2023-03-03T01:33:44.066139Z", "iopub.status.idle": "2023-03-03T01:40:12.873547Z", "shell.execute_reply": "2023-03-03T01:40:12.872797Z", "shell.execute_reply.started": "2023-03-03T01:33:44.066975Z" } } }, { "cell_type": "markdown", "source": [ "# Universal Guided Diffusion" ], "metadata": {} }, { "cell_type": "code", "execution_count": null, "source": [ "%cd ~/../notebooks\n", "!git clone https://github.com/arpitbansal297/Universal-Guided-Diffusion\n", "%cd Universal-Guided-Diffusion/stable-diffusion-guided\n", "!pip install -e .\n", "!pip install GPUtil\n", "!pip install blobfile\n", "!pip install facenet-pytorch\n", "!pip install invisible-watermark diffusers clip kornia deepface\n", "!pip install -U transformers\n", "!git clone https://github.com/CompVis/taming-transformers\n", "!cp -r taming-transformers/taming ./" ], "outputs": [], "metadata": { "execution": { "iopub.execute_input": "2023-03-03T02:06:44.401522Z", "iopub.status.busy": "2023-03-03T02:06:44.401241Z", "iopub.status.idle": "2023-03-03T02:07:04.535073Z", "shell.execute_reply": "2023-03-03T02:07:04.534263Z", "shell.execute_reply.started": "2023-03-03T02:06:44.401500Z" } } }, { "cell_type": "markdown", "source": [ "### Face recognition" ], "metadata": {} }, { "cell_type": "code", "execution_count": null, "source": [ "!mkdir test_face\n", "!cp scripts/* ./\n", "!python face_detection.py --indexes 0 --text \"Headshot of a person with blonde hair with space background\" --optim_forward_guidance --fr_crop --optim_num_steps 2 --optim_forward_guidance_wt 20000 --optim_original_conditioning --ddim_steps 5 --optim_folder ./test_face/text_type_4/ --ckpt ../../../datasets/stable-diffusion-classic/v1-5-pruned-emaonly.ckpt\n", "!python face_detection.py --indexes 0 --text \"A headshot of a woman looking like a lara croft\" --optim_forward_guidance --fr_crop --optim_num_steps 2 --optim_forward_guidance_wt 20000 --optim_original_conditioning --ddim_steps 500 --optim_folder ./test_face/text_type_11/ --ckpt ../../../datasets/stable-diffusion-classic/v1-5-pruned-emaonly.ckpt\n" ], "outputs": [], "metadata": { "execution": { "iopub.execute_input": "2023-03-03T02:09:22.772652Z", "iopub.status.busy": "2023-03-03T02:09:22.772063Z" } } }, { "cell_type": "code", "execution_count": null, "source": [ "!python face_top_k.py --folder ./test_face/text_type_4/ --img_index 0 --img_saved 20 --top_k 5\n", "!python face_top_k.py --folder ./test_face/text_type_11/ --img_index 0 --img_saved 20 --top_k 5" ], "outputs": [], "metadata": { "execution": { "iopub.execute_input": "2023-03-03T02:02:54.886820Z", "iopub.status.busy": "2023-03-03T02:02:54.886068Z", "iopub.status.idle": "2023-03-03T02:03:18.634801Z", "shell.execute_reply": "2023-03-03T02:03:18.634183Z", "shell.execute_reply.started": "2023-03-03T02:02:54.886792Z" } } }, { "cell_type": "markdown", "source": [ "### Segmentation\n" ], "metadata": {} }, { "cell_type": "code", "execution_count": null, "source": [ "!mkdir test_segmentation\n", "!python segmentation.py --indexes 1 --text \"Walker hound, Walker foxhound on snow\" --scale 1.5 --optim_forward_guidance --optim_num_steps 10 --optim_forward_guidance_wt 400 --optim_original_conditioning --ddim_steps 500 --optim_folder ./test_segmentation/text_type_4/ --ckpt ../../../datasets/stable-diffusion-classic/v1-5-pruned-emaonly.ckpt\n", "!python segmentation.py --indexes 1 --text \"Walker hound, Walker foxhound as an oil painting\" --scale 2.0 --optim_forward_guidance --optim_num_steps 10 --optim_forward_guidance_wt 400 --optim_original_conditioning --ddim_steps 500 --optim_folder ./test_segmentation/text_type_3/ --ckpt ../../../datasets/stable-diffusion-classic/v1-5-pruned-emaonly.ckpt\n" ], "outputs": [], "metadata": { "execution": { "iopub.execute_input": "2023-03-02T23:43:13.869062Z", "iopub.status.busy": "2023-03-02T23:43:13.868461Z", "iopub.status.idle": "2023-03-02T23:44:25.706654Z", "shell.execute_reply": "2023-03-02T23:44:25.705960Z", "shell.execute_reply.started": "2023-03-02T23:43:13.869029Z" } } }, { "cell_type": "markdown", "source": [ "### Object Detection" ], "metadata": {} }, { "cell_type": "code", "execution_count": null, "source": [ "!mkdir test_od\n", "!python segmentation.py --indexes 0 --text \"a headshot of a woman with a dog\" --scale 1.5 --optim_forward_guidance --optim_num_steps 5 --optim_forward_guidance_wt 100 --optim_original_conditioning --ddim_steps 250 --optim_folder ./test_od/ --ckpt ../../../datasets/stable-diffusion-classic/v1-5-pruned-emaonly.ckpt\n", "!python segmentation.py --indexes 0 --text \"a headshot of a woman with a dog on beach\" --scale 1.5 --optim_forward_guidance --optim_num_steps 5 --optim_forward_guidance_wt 100 --optim_original_conditioning --ddim_steps 250 --optim_folder ./test_od/ --ckpt ../../../datasets/stable-diffusion-classic/v1-5-pruned-emaonly.ckpt" ], "outputs": [], "metadata": { "execution": { "iopub.execute_input": "2023-03-02T23:44:58.714404Z", "iopub.status.busy": "2023-03-02T23:44:58.714115Z", "iopub.status.idle": "2023-03-02T23:45:49.154756Z", "shell.execute_reply": "2023-03-02T23:45:49.154020Z", "shell.execute_reply.started": "2023-03-02T23:44:58.714379Z" } } }, { "cell_type": "markdown", "source": [ "### Style transfer" ], "metadata": {} }, { "cell_type": "code", "execution_count": null, "source": [ "!mkdir test_style\n", "!python style_transfer.py --indexes 0 --text \"A colorful photo of a eiffel tower\" --scale 3.0 --optim_forward_guidance --optim_num_steps 6 --optim_forward_guidance_wt 6 --optim_original_conditioning --ddim_steps 500 --optim_folder ./test_style/text_type_1/ --ckpt ../../../datasets/stable-diffusion-classic/v1-5-pruned-emaonly.ckpt\n", "!python style_transfer.py --indexes 0 --text \"A fantasy photo of volcanoes\" --scale 3.0 --optim_forward_guidance --optim_num_steps 6 --optim_forward_guidance_wt 6 --optim_original_conditioning --ddim_steps 500 --optim_folder ./test_style/text_type_2/ --ckpt ../../../datasets/stable-diffusion-classic/v1-5-pruned-emaonly.ckpt\n" ], "outputs": [], "metadata": { "execution": { "iopub.execute_input": "2023-03-02T23:47:18.620122Z", "iopub.status.busy": "2023-03-02T23:47:18.619322Z", "iopub.status.idle": "2023-03-02T23:47:36.387742Z", "shell.execute_reply": "2023-03-02T23:47:36.387005Z", "shell.execute_reply.started": "2023-03-02T23:47:18.620084Z" } } }, { "cell_type": "markdown", "source": [ "# Cross Domain Composting\n", "\n", "> This is a work in progress section. " ], "metadata": {} }, { "cell_type": "code", "execution_count": 1, "source": [ "!wget https://repo.anaconda.com/miniconda/Miniconda3-py39_23.1.0-1-Linux-x86_64.sh" ], "outputs": [], "metadata": { "execution": { "iopub.execute_input": "2023-03-02T21:07:35.383569Z", "iopub.status.busy": "2023-03-02T21:07:35.382959Z", "iopub.status.idle": "2023-03-02T21:07:41.906451Z", "shell.execute_reply": "2023-03-02T21:07:41.905485Z", "shell.execute_reply.started": "2023-03-02T21:07:35.383546Z" } } }, { "cell_type": "markdown", "source": [ "### Now go into the terminal, and paste the following:\n", "\n", ">bash Miniconda Miniconda3-py39_23.1.0-1-Linux-x86_64.sh\n", "\n", "### Then follow the instructions to set up. Agree to their license, and run conda init at the end.\n", "\n", "### Afterwards, in the terminal paste the following\n", "\n", ">cd ~/../notebooks/\n", ">\n", ">git clone --recursive https://github.com/cross-domain-compositing/cross-domain-compositing.git \n", ">\n", ">cd cross-domain-compositing/ \n", ">\n", ">conda env create -f environment.yaml \n", ">\n", ">conda activate ldm \n", ">\n", ">wget -P models/ldm/stable-diffusion-v1 https://huggingface.co/CompVis/stable-diffusion-v-1-4-original/resolve/main/sd-v1-4.ckpt \n", ">\n", ">wget -P models/ldm/stable-diffusion-v1 https://huggingface.co/runwayml/stable-diffusion-inpainting/resolve/main/sd-v1-5-inpainting.ckpt \n", "\n", "### From there, you can run the demo. \n", "\n", ">python \"scripts_cdc/img2img.py\n", "> --config configs/stable-diffusion/v1-inference.yaml \\\n", "> --ckpt models/ldm/stable-diffusion-v1/sd-v1-4.ckpt \\\n", "> --init_img examples/scribbles/images/ \\\n", "> --mask examples/scribbles/masks/ \\\n", "> --from_file examples/scribbles/prompts.txt \\\n", "> --batch_size 1 \\\n", "> --n_samples 1 \\\n", "> --outdir outputs/scribbles \\\n", "> --ddim_steps 50 \\\n", "> --strength 1.0 \\\n", "> --T_out 1.0 \\\n", "> --T_in 0.0 0.2 0.4 0.6 0.8 \\\n", "> --down_N_out 1 \\\n", "> --down_N_in 1 2 4 \\\n", "> --seed 42 \\\n", "> --repaint_start 0 0.2 0.4 0.6 0.8 \\\n", "> --skip_grid\"\n", "\n", "or to paste:\n", "\n", "python scripts_cdc/img2img.py --config configs/stable-diffusion/v1-inference.yaml --ckpt models/ldm/stable-diffusion-v1/sd-v1-4.ckpt --init_img examples/scribbles/images/ --mask examples/scribbles/masks/ --from_file examples/scribbles/prompts.txt --batch_size 1 --n_samples 1 --outdir outputs/scribbles --ddim_steps 50 --strength 1.0 --T_out 1.0 --T_in 0.0 0.2 0.4 0.6 0.8 --down_N_out 1 --down_N_in 1 2 4 --seed 42 --repaint_start 0 0.2 0.4 0.6 0.8 --skip_grid\n", "\n" ], "metadata": {} }, { "cell_type": "code", "execution_count": null, "source": [], "outputs": [], "metadata": {} } ], "metadata": { "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.16" } }, "nbformat": 4, "nbformat_minor": 5 }