resnetppo.ipynb

The document contains a Jupyter notebook that utilizes PyTorch and torchvision libraries for image processing and manipulation. It includes code for loading an image, preprocessing it, and creating a canvas to display the image. The notebook is set up for execution in a Kaggle environment with GPU support.

{"metadata":{"colab":{"provenance":

[],"authorship_tag":"ABX9TyMa4luEu5TX6eIwRsVTFquF"},"kernelspec":
{"name":"python3","display_name":"Python 3","language":"python"},"language_info":
{"name":"python","version":"3.10.14","mimetype":"text/x-python","codemirror_mode":
{"name":"ipython","version":3},"pygments_lexer":"ipython3","nbconvert_exporter":"py
thon","file_extension":".py"},"kaggle":
{"accelerator":"nvidiaTeslaT4","dataSources":
[{"sourceId":9563184,"sourceType":"datasetVersion","datasetId":5828021}],"dockerIma
geVersionId":30787,"isInternetEnabled":true,"language":"python","sourceType":"noteb
ook","isGpuEnabled":true}},"nbformat_minor":4,"nbformat":4,"cells":
[{"cell_type":"raw","source":"import torch\nimport torch.nn as nn\nimport
torch.nn.functional as F\nfrom torch.autograd import Variable\nfrom
torch.utils.data import DataLoader\nfrom torchvision import transforms\nfrom
torchvision import datasets\n#import resnet50\nfrom torchvision.models import
resnet18,resnet50,resnet101\nimport matplotlib.pyplot as plt\nimport numpy as np\
nimport matplotlib.pyplot as plt\nimport skimage\nimport random","metadata":
{"id":"xyDnWcQnfD6a","execution":{"iopub.status.busy":"2024-10-
06T23:22:58.532104Z","iopub.execute_input":"2024-10-
06T23:22:58.532514Z","iopub.status.idle":"2024-10-
06T23:23:03.008468Z","shell.execute_reply.started":"2024-10-
06T23:22:58.532478Z","shell.execute_reply":"2024-10-06T23:23:03.007348Z"}}},
{"cell_type":"code","source":"import torch\nimport torch.nn as nn\nimport
torch.nn.functional as F\nfrom torch.autograd import Variable\nfrom
torch.utils.data import DataLoader\nfrom torchvision import transforms\nfrom
torchvision import datasets\n#import resnet50\nfrom torchvision.models import
resnet18,resnet50,resnet101\nimport matplotlib.pyplot as plt\nimport numpy as np\
nimport matplotlib.pyplot as plt\nimport skimage\nimport random","metadata":
{"execution":{"iopub.status.busy":"2024-10-
07T18:36:29.490517Z","iopub.execute_input":"2024-10-
07T18:36:29.491795Z","iopub.status.idle":"2024-10-
07T18:36:29.498469Z","shell.execute_reply.started":"2024-10-
07T18:36:29.491745Z","shell.execute_reply":"2024-10-
07T18:36:29.497116Z"},"trusted":true},"execution_count":9,"outputs":[]},
{"cell_type":"markdown","source":"","metadata":{"id":"NPp-3gjFpA0q"}},
{"cell_type":"code","source":"#create
images\nmonkee=skimage.io.imread('/kaggle/input/imagenmonkee/fae6202d-2a8b-40e3-
8d05-19f46ba7b537.ce308afc0ee1b59229142849913da8f1.jpeg')\
nmonkee=skimage.transform.resize(monkee,(250,250))\nimg=monkee\npreprocess =
transforms.Compose([\n transforms.ToPILImage(),\n transforms.Resize(256),\n
transforms.CenterCrop(224),\n transforms.ToTensor(),\n
transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),\n])\
n#create 1500x1500 canvas\ncanvas=np.zeros((1500,1500,3))\nx=750-monkee.shape[0]/2\
nresnetimages=[]\nys=[]\nfor i in range(3):\n y=random.randint(0,1000)\n
canvas[int(y):int(y)+monkee.shape[0],int(x):int(x)+monkee.shape[1]]=monkee\n
yinfivesec=y+(1/2)*9.8*5**2\n ys.append(yinfivesec)\n
resnetimages.append(preprocess(canvas))\n canvas=np.zeros((1500,1500,3))\n\n\n\
n\nplt.imshow(canvas)","metadata":{"id":"eEaP2EL-oBHq","colab":
{"base_uri":"https://localhost:8080/","height":452},"executionInfo":
{"status":"ok","timestamp":1728255917927,"user_tz":300,"elapsed":3765,"user":
{"displayName":"DAVID SANTIAGO SANCHEZ
CEPEDA","userId":"09385389865282990454"}},"outputId":"a46c42fe-b2fd-4e1a-df59-
e92043d17de6","execution":{"iopub.status.busy":"2024-10-
07T18:36:29.500507Z","iopub.execute_input":"2024-10-
07T18:36:29.500838Z","iopub.status.idle":"2024-10-
07T18:36:32.811426Z","shell.execute_reply.started":"2024-10-
07T18:36:29.500803Z","shell.execute_reply":"2024-10-
07T18:36:32.810011Z"},"trusted":true},"execution_count":10,"outputs":
[{"execution_count":10,"output_type":"execute_result","data":{"text/
plain":"<matplotlib.image.AxesImage at 0x7bfa4e2fffa0>"},"metadata":{}},
{"output_type":"display_data","data":{"text/plain":"<Figure size 640x480 with 1
Axes>","image/png":"iVBORw0KGgoAAAANSUhEUgAAAbIAAAGiCAYAAACCpUOHAAAAOXRFWHRTb2Z0d2F
yZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/
xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAoPUlEQVR4nO3df3DU9Z3H8deGkCUx7iaA2SWYYNoyUIXSaD
S3ivYPMgSPUVud9ppJKUeZMlg8wToUMw7UXscmhTt/
3SlqZ06ZqZWWGcHKiE6aUCLTGCAEELCRGymk6CZXYnaDQkjY9/3R43ssRgllQ/
JJn4+Z94z5ft673897SPKaTT5ufGZmAgDAUWlDvQEAAC4FQQYAcBpBBgBwGkEGAHAaQQYAcBpBBgBwGkEGA
HAaQQYAcBpBBgBwGkEGAHDasA6yp59+Wtdcc43GjBmj0tJS7dixY6i3BAAYZoZtkP3617/WD3/4Q/34xz/
W7t27NWPGDJWXl6ujo2OotwYAGEZ8w/VNg0tLS3XjjTfqP//zPyVJiURCBQUF+pd/
+Rc99NBDQ7w7AMBwkT7UG+jP6dOn1dzcrKqqKu9aWlqaysrK1NjY2O9jenp61NPT432cSCTU2dmpcePGyef
zDfqeAQCpY2bq7u5Wfn6+0tI+/4eHwzLI/vKXv+jMmTMKhUJJ10OhkP74xz/2+5jq6mr95Cc/
uRzbAwBcJm1tbbr66qs/t2fY/
o7sYlVVVSkWi3l19OjRod4SAOASXXnllRfsGZavyMaPH69Ro0apvb096Xp7e7vC4XC/j/H7/fL7/
ZdjewCAy2Qgvxoalq/IMjIydMMNN6iurs67lkgkVFdXp0gkMoQ7AwAMN8PyFZkk/
fCHP9T8+fNVUlKim266SU888YQ+/vhjLViwYKi3BgAYRoZtkP3TP/2T/ud//
kerVq1SNBrVV7/6Vb3xxhufOgACAPj7Nmz/P7JLFY/
HFQwGh3obAIBLEIvFFAgEPrdnWP6ODACAgSLIAABOI8gAAE4jyAAATiPIAABOI8gAAE4jyAAATiPIAABOI8
gAAE4jyAAATiPIAABOI8gAAE4jyAAATiPIAABOI8gAAE4jyAAATiPIAABOI8gAAE4jyAAATiPIAABOI8gAA
E4jyAAATiPIAABOI8gAAE4jyAAATiPIAABOI8gAAE4jyAAATiPIAABOI8gAAE4jyAAATiPIAABOI8gAAE4j
yAAATiPIAABOI8gAAE4jyAAATiPIAABOS3mQVVdX68Ybb9SVV16pvLw8ff3rX1dra2tSz6lTp7RkyRKNGzd
O2dnZuueee9Te3p7Uc/ToUc2dO1dZWVnKy8vT8uXL1dfXl+rtAgAcl/
Ig27Ztm5YsWaK3335btbW16u3t1ezZs/Xxxx97PQ888IBee+01bdiwQdu2bdMHH3ygu+++21s/
c+aM5s6dq9OnT+sPf/iD1q1bpxdffFGrVq1K9XYBAK6zQdbR0WGSbNu2bWZm1tXVZaNHj7YNGzZ4Pe++
+65JssbGRjMze/311y0tLc2i0ajXs3btWgsEAtbT0zOg+8ZiMZNEURRFOVyxWOyC3+8H/
XdksVhMkjR27FhJUnNzs3p7e1VWVub1TJ06VYWFhWpsbJQkNTY2avr06QqFQl5PeXm54vG4Dhw40O99enp6
FI/
HkwoAMPINapAlEgktW7ZMt9xyi6ZNmyZJikajysjIUE5OTlJvKBRSNBr1es4NsbPrZ9f6U11drWAw6FVBQU
GKpwEADEeDGmRLlizR/v37tX79+sG8jSSpqqpKsVjMq7a2tkG/
JwBg6KUP1hPfd9992rx5sxoaGnT11Vd718PhsE6fPq2urq6kV2Xt7e0Kh8Nez44dO5Ke7+ypxrM95/P7/
fL7/
SmeAgAw3KX8FZmZ6b777tPGjRtVX1+voqKipPUbbrhBo0ePVl1dnXettbVVR48eVSQSkSRFIhG988476ujo
8Hpqa2sVCAR07bXXpnrLAACXXcQBxAG59957LRgM2u9//3v78MMPvfrkk0+8nsWLF1thYaHV19fbrl27LBK
JWCQS8db7+vps2rRpNnv2bNuzZ4+98cYbdtVVV1lVVdWA98GpRYqiKPdrIKcWUx5kn7WZF154wes5efKk/
eAHP7Dc3FzLysqyb3zjG/bhhx8mPc+f/vQnu/322y0zM9PGjx9vDz74oPX29g54HwQZRVGU+zWQIPP9X/
iMOPF4XMFgcKi3AQC4BLFYTIFA4HN7eK9FAIDTCDIAgNMIMgCA0wgyAIDTCDIAgNMIMgCA0wgyAIDTCDIAg
NMIMgCA0wgyAIDTCDIAgNMIMgCA0wgyAIDTCDIAgNMIMgCA0wgyAIDTCDIAgNMIMgCA0wgyAIDTCDIAgNMI
MgCA0wgyAIDTCDIAgNMIMgCA0wgyAIDTCDIAgNMIMgCA0wgyAIDTCDIAgNMIMgCA0wgyAIDTCDIAgNMIMgC
A0wgyAIDTCDIAgNMIMgCA0wgyAIDTCDIAgNMIMgCA0wY9yGpqauTz+bRs2TLv2qlTp7RkyRKNGzdO2dnZuu
eee9Te3p70uKNHj2ru3LnKyspSXl6eli9frr6+vsHeLgDAMYMaZDt37tRzzz2nr3zlK0nXH3jgAb322mvas
GGDtm3bpg8++EB33323t37mzBnNnTtXp0+f1h/
+8AetW7dOL774olatWjWY2wUAuMgGSXd3t02ePNlqa2vta1/7mi1dutTMzLq6umz06NG2YcMGr/
fdd981SdbY2GhmZq+//
rqlpaVZNBr1etauXWuBQMB6enr6vd+pU6csFot51dbWZpIoiqIohysWi10wbwbtFdmSJUs0d+5clZWVJV1v
bm5Wb29v0vWpU6eqsLBQjY2NkqTGxkZNnz5doVDI6ykvL1c8HteBAwf6vV91dbWCwaBXBQUFgzAVAGC4GZQ
gW79+vXbv3q3q6upPrUWjUWVkZCgnJyfpeigUUjQa9XrODbGz62fX+lNVVaVYLOZVW1tbCiYBAAx36al+wr
a2Ni1dulS1tbUaM2ZMqp/+M/n9fvn9/st2PwDA8JDyV2TNzc3q6OjQ9ddfr/
T0dKWnp2vbtm166qmnlJ6erlAopNOnT6urqyvpce3t7QqHw5KkcDj8qVOMZz8+2wMAgDQIQTZr1iy988472
rNnj1clJSWqrKz0/
nv06NGqq6vzHtPa2qqjR48qEolIkiKRiN555x11dHR4PbW1tQoEArr22mtTvWUAgMsu4WDigJ17atHMbPHi
xVZYWGj19fW2a9cui0QiFolEvPW+vj6bNm2azZ492/bs2WNvvPGGXXXVVVZVVTXge8ZisSE/
bUNRFEVdWg3k1GLKf0c2EI8//rjS0tJ0zz33qKenR+Xl5XrmmWe89VGjRmnz5s269957FYlEdMUVV2j+/
Pn613/916HYLgBgGPOZmQ31JgZDPB5XMBgc6m0AAC5BLBZTIBD43B7eaxEA4DSCDADgNIIMAOA0ggwA4DSC
DADgNIIMAOA0ggwA4DSCDADgNIIMAOA0ggwA4DSCDADgNIIMAOA0ggwA4DSCDADgNIIMAOA0ggwA4DSCDAD
gNIIMAOA0ggwA4DSCDADgNIIMAOA0ggwA4DSCDADgNIIMAOA0ggwA4DSCDADgNIIMAOA0ggwA4DSCDADgNI
IMAOA0ggwA4DSCDADgNIIMAOA0ggwA4DSCDADgNIIMAOA0ggwA4DSCDADgtEEJsmPHjuk73/
mOxo0bp8zMTE2fPl27du3y1s1Mq1at0oQJE5SZmamysjIdOnQo6Tk6OztVWVmpQCCgnJwcLVy4UCdOnBiM7
QIAHJbyIPvoo490yy23aPTo0dqyZYsOHjyof//
3f1dubq7Xs3r1aj311FN69tln1dTUpCuuuELl5eU6deqU11NZWakDBw6otrZWmzdvVkNDgxYtWpTq7QIAXG
cptmLFCps5c+ZnricSCQuHw7ZmzRrvWldXl/
n9fnv55ZfNzOzgwYMmyXbu3On1bNmyxXw+nx07dmxA+4jFYiaJoiiKcrhisdgFv9+n/
BXZb3/7W5WUlOib3/ym8vLyVFxcrF/
84hfe+uHDhxWNRlVWVuZdCwaDKi0tVWNjoySpsbFROTk5Kikp8XrKysqUlpampqamfu/
b09OjeDyeVACAkS/lQfb+++9r7dq1mjx5st58803de++9uv/+
+7Vu3TpJUjQalSSFQqGkx4VCIW8tGo0qLy8vaT09PV1jx471es5XXV2tYDDoVUFBQapHAwAMQykPskQioeu
vv14/+9nPVFxcrEWLFun73/++nn322VTfKklVVZVisZhXbW1tg3o/
AMDwkPIgmzBhgq699tqka1/+8pd19OhRSVI4HJYktbe3J/W0t7d7a+FwWB0dHUnrfX196uzs9HrO5/
f7FQgEkgoAMPKlPMhuueUWtba2Jl177733NGnSJElSUVGRwuGw6urqvPV4PK6mpiZFIhFJUiQSUVdXl5qbm
72e+vp6JRIJlZaWpnrLAACXDegI4EXYsWOHpaen26OPPmqHDh2yl156ybKysuyXv/
yl11NTU2M5OTn26quv2r59+
+yuu+6yoqIiO3nypNczZ84cKy4utqamJtu+fbtNnjzZKioqBrwPTi1SFEW5XwM5tZjyIDMze+2112zatGnm
9/tt6tSp9vzzzyetJxIJW7lypYVCIfP7/TZr1ixrbW1N6jl+/
LhVVFRYdna2BQIBW7BggXV3dw94DwQZRVGU+zWQIPOZmWkEisfjCgaDQ70NAMAliMViFzzzwHstAgCcRpAB
AJxGkAEAnEaQAQCcRpABAJxGkAEAnEaQAQCcRpABAJxGkAEAnEaQAQCcRpABAJxGkAEAnEaQAQCcRpABAJx
GkAEAnEaQAQCcRpABAJxGkAEAnEaQAQCcRpABAJxGkAEAnEaQAQCcRpABAJxGkAEAnEaQAQCcRpABAJxGkA
EAnEaQAQCcRpABAJxGkAEAnEaQAQCcRpABAJxGkAEAnEaQAQCcRpABAJxGkAEAnEaQAQCcRpABAJyW8iA7c
+aMVq5cqaKiImVmZuqLX/
yifvrTn8rMvB4z06pVqzRhwgRlZmaqrKxMhw4dSnqezs5OVVZWKhAIKCcnRwsXLtSJEydSvV0AgOssxR599
FEbN26cbd682Q4fPmwbNmyw7Oxse/LJJ72empoaCwaDtmnTJtu7d6/
deeedVlRUZCdPnvR65syZYzNmzLC3337b3nrrLfvSl75kFRUVA95HLBYzSRRFUZTDFYvFLvj9PuVBNnfuXP
ve976XdO3uu++2yspKMzNLJBIWDodtzZo13npXV5f5/
X57+eWXzczs4MGDJsl27tzp9WzZssV8Pp8dO3ZsQPsgyCiKotyvgQRZyn+0ePPNN6uurk7vvfeeJGnv3r3a
vn27br/9dknS4cOHFY1GVVZW5j0mGAyqtLRUjY2NkqTGxkbl5OSopKTE6ykrK1NaWpqampr6vW9PT4/
i8XhSAQBGvvRUP+FDDz2keDyuqVOnatSoUTpz5oweffRRVVZWSpKi0agkKRQKJT0uFAp5a9FoVHl5eckbTU
/X2LFjvZ7zVVdX6yc/+UmqxwEADHMpf0X2m9/8Ri+99JJ+9atfaffu3Vq3bp3+7d/
+TevWrUv1rZJUVVUpFot51dbWNqj3AwAMDyl/
RbZ8+XI99NBD+va3vy1Jmj59uo4cOaLq6mrNnz9f4XBYktTe3q4JEyZ4j2tvb9dXv/pVSVI4HFZHR0fS8/
b19amzs9N7/Pn8fr/8fn+qxwEADHMpf0X2ySefKC0t+WlHjRqlRCIhSSoqKlI4HFZdXZ23Ho/
H1dTUpEgkIkmKRCLq6upSc3Oz11NfX69EIqHS0tJUbxkA4LIBHQG8CPPnz7eJEyd6x+9feeUVGz9+vP3oRz
/yempqaiwnJ8deffVV27dvn9111139Hr8vLi62pqYm2759u02ePJnj9xRFUX9n
NSTH7+PxuC1dutQKCwttzJgx9oUvfMEefvhh6+np8XoSiYStXLnSQqGQ+f1+mzVrlrW2tiY9z/
Hjx62iosKys7MtEAjYggULrLu7e8D7IMgoiqLcr4EEmc/
snLfcGEHi8biCweBQbwMAcAlisZgCgcDn9vBeiwAApxFkAACnEWQAAKcRZAAApxFkAACnEWQAAKcRZAAApx
FkAACnEWQAAKcRZAAApxFkAACnEWQAAKcRZAAApxFkAACnEWQAAKcRZAAApxFkAACnEWQAAKcRZAAApxFkA
ACnEWQAAKcRZAAApxFkAACnEWQAAKcRZAAApxFkAACnEWQAAKcRZAAApxFkAACnEWQAAKcRZAAApxFkAACn
EWQAAKcRZAAApxFkAACnEWQAAKcRZAAApxFkAACnXXSQNTQ06I477lB+fr58Pp82bdqUtG5mWrVqlSZMmKD
MzEyVlZXp0KFDST2dnZ2qrKxUIBBQTk6OFi5cqBMnTiT17Nu3T7feeqvGjBmjgoICrV69+uKnAwCMeBcdZB
9//LFmzJihp59+ut/
11atX66mnntKzzz6rpqYmXXHFFSovL9epU6e8nsrKSh04cEC1tbXavHmzGhoatGjRIm89Ho9r9uzZmjRpkp
qbm7VmzRo98sgjev755/+GEQEAI5pdAkm2ceNG7+NEImHhcNjWrFnjXevq6jK/328vv/
yymZkdPHjQJNnOnTu9ni1btpjP57Njx46Zmdkzzzxjubm51tPT4/
WsWLHCpkyZMuC9xWIxk0RRFEU5XLFY7ILf71P6O7LDhw8rGo2qrKzMuxYMBlVaWqrGxkZJUmNjo3JyclRSU
uL1lJWVKS0tTU1NTV7PbbfdpoyMDK+nvLxcra2t+uijj/
q9d09Pj+LxeFIBAEa+lAZZNBqVJIVCoaTroVDIW4tGo8rLy0taT09P19ixY5N6+nuOc+9xvurqagWDQa8KC
goufSAAwLA3Yk4tVlVVKRaLedXW1jbUWwIAXAYpDbJwOCxJam9vT7re3t7urYXDYXV0dCSt9/
X1qbOzM6mnv+c49x7n8/
v9CgQCSQUAGPlSGmRFRUUKh8Oqq6vzrsXjcTU1NSkSiUiSIpGIurq61Nzc7PXU19crkUiotLTU62loaFBvb
6/XU1tbqylTpig3NzeVWwYAuG7AxwD/
T3d3t7W0tFhLS4tJsscee8xaWlrsyJEjZmZWU1NjOTk59uqrr9q+ffvsrrvusqKiIjt58qT3HHPmzLHi4mJ
ramqy7du32+TJk62iosJb7+rqslAoZPPmzbP9+/
fb+vXrLSsry5577rkB75NTixRFUe7XQE4tXnSQbd26td+bzZ8/38z+egR/5cqVFgqFzO/
326xZs6y1tTXpOY4fP24VFRWWnZ1tgUDAFixYYN3d3Uk9e/futZkzZ5rf77eJEydaTU3NRe2TIKMoinK/
BhJkPjMzjUDxeFzBYHCotwEAuASxWOyCZx5GzKlFAMDfJ4IMAOA0ggwA4DSCDADgNIIMAOA0ggwA4DSCDAD
gNIIMAOA0ggwA4DSCDADgNIIMAOA0ggwA4DSCDADgNIIMAOA0ggwA4DSCDADgNIIMAOA0ggwA4DSCDADgNI
IMAOA0ggwA4DSCDADgNIIMAOA0ggwA4DSCDADgNIIMAOA0ggwA4DSCDADgNIIMAOA0ggwA4DSCDADgNIIMA
OA0ggwA4DSCDADgNIIMAOA0ggwA4DSCDADgNIIMAOA0ggwA4LSLDrKGhgbdcccdys/Pl8/
n06ZNm7y13t5erVixQtOnT9cVV1yh/
Px8ffe739UHH3yQ9BydnZ2qrKxUIBBQTk6OFi5cqBMnTiT17Nu3T7feeqvGjBmjgoICrV69+m+bEAAwol10
kH388ceaMWOGnn766U+tffLJJ9q9e7dWrlyp3bt365VXXlFra6vuvPPOpL7KykodOHBAtbW12rx5sxoaGrR
o0SJvPR6Pa/bs2Zo0aZKam5u1Zs0aPfLII3r++ef/hhEBACOaXQJJtnHjxs/
t2bFjh0myI0eOmJnZwYMHTZLt3LnT69myZYv5fD47duyYmZk988wzlpubaz09PV7PihUrbMqUKZ95n1OnTl
ksFvOqra3NJFEURVEOVywWu2AWDfrvyGKxmHw+n3JyciRJjY2NysnJUUlJiddTVlamtLQ0NTU1eT233XabM
jIyvJ7y8nK1trbqo48+6vc+1dXVCgaDXhUUFAzeUACAYWNQg+zUqVNasWKFKioqFAgEJEnRaFR5eXlJfenp
6Ro7dqyi0ajXEwqFknrOfny253xVVVWKxWJetbW1pXocAMAwlD5YT9zb26tvfetbMjOtXbt2sG7j8fv98vv
9g34fAMDwMihBdjbEjhw5ovr6eu/VmCSFw2F1dHQk9ff19amzs1PhcNjraW9vT+o5+/
HZHgAApEH40eLZEDt06JB+97vfady4cUnrkUhEXV1dam5u9q7V19crkUiotLTU62loaFBvb6/
XU1tbqylTpig3NzfVWwYAuOyCx0HO093dbS0tLdbS0mKS7LHHHrOWlhY7cuSInT592u688067+uqrbc+ePf
bhhx96de4JxDlz5lhxcbE1NTXZ9u3bbfLkyVZRUeGtd3V1WSgUsnnz5tn+/ftt/
fr1lpWVZc8999yA9xmLxYb8tA1FURR1aTWQU4sXHWRbt27t92bz58+3w4cPf+Zmtm7d6j3H8ePHraKiwrKz
sy0QCNiCBQusu7s76T579+61mTNnmt/
vt4kTJ1pNTc1F7ZMgoyiKcr8GEmQ+MzONQPF4XMFgcKi3AQC4BLFYLOmcRX94r0UAgNMIMgCA0wgyAIDTCD
IAgNMIMgCA0wgyAIDTCDIAgNMIMgCA0wgyAIDTCDIAgNMIMgCA0wgyAIDTCDIAgNMIMgCA0wgyAIDTCDIAg
NMIMgCA0wgyAIDTCDIAgNMIMgCA0wgyAIDTCDIAgNMIMgCA0wgyAIDTCDIAgNMIMgCA0wgyAIDTCDIAgNMI
MgCA0wgyAIDTCDIAgNMIMgCA0wgyAIDTCDIAgNMIMgCA0wgyAIDTCDIAgNMIMgCA0y46yBoaGnTHHXcoPz9
fPp9PmzZt+szexYsXy+fz6Yknnki63tnZqcrKSgUCAeXk5GjhwoU6ceJEUs+
+fft06623asyYMSooKNDq1asvdqsAgL8DFx1kH3/8sWbMmKGnn376c/s2btyot99+W/
n5+Z9aq6ys1IEDB1RbW6vNmzeroaFBixYt8tbj8bhmz56tSZMmqbm5WWvWrNEjjzyi559//
mK3CwAY6ewSSLKNGzd+6vqf//xnmzhxou3fv98mTZpkjz/
+uLd28OBBk2Q7d+70rm3ZssV8Pp8dO3bMzMyeeeYZy83NtZ6eHq9nxYoVNmXKlAHvLRaLmSSKoijK4YrFYh
f8fp/
y35ElEgnNmzdPy5cv13XXXfep9cbGRuXk5KikpMS7VlZWprS0NDU1NXk9t912mzIyMrye8vJytba26qOPPu
r3vj09PYrH40kFABj5Uh5kP//5z5Wenq7777+/3/
VoNKq8vLyka+np6Ro7dqyi0ajXEwqFknrOfny253zV1dUKBoNeFRQUXOooAAAHpDTImpub9eSTT+rFF1+Uz
+dL5VNfUFVVlWKxmFdtbW2X9f4AgKGR0iB766231NHRocLCQqWnpys9PV1HjhzRgw8+qGuuuUaSFA6H1dHR
kfS4vr4+dXZ2KhwOez3t7e1JPWc/PttzPr/
fr0AgkFQAgJEvpUE2b9487du3T3v27PEqPz9fy5cv15tvvilJikQi6urqUnNzs/
e4+vp6JRIJlZaWej0NDQ3q7e31emprazVlyhTl5uamcssAANcN+Bjg/
+nu7raWlhZraWkxSfbYY49ZS0uLHTlypN/
+808tmpnNmTPHiouLrampybZv326TJ0+2iooKb72rq8tCoZDNmzfP9u/
fb+vXr7esrCx77rnnBrxPTi1SFEW5XwM5tXjRQbZ169Z+bzZ//vx++/
sLsuPHj1tFRYVlZ2dbIBCwBQsWWHd3d1LP3r17bebMmeb3+23ixIlWU1NzUfskyCiKotyvgQSZz8xMI1A8H
lcwGBzqbQAALkEsFrvgmQfeaxEA4DSCDADgNIIMAOA0ggwA4DSCDADgNIIMAOA0ggwA4DSCDADgNIIMAOA0
ggwA4DSCDADgNIIMAOA0ggwA4DSCDADgNIIMAOA0ggwA4LQRG2Qj9O+FAsDflYF8Lx+xQXb8+PGh3gIA4BJ
1d3dfsCf9MuxjSIwdO1aSdPToUQWDwSHezd8mHo+roKBAbW1tF/
xT38PZSJiDGYaHkTCDNDLmGOwZzEzd3d3Kz8+/YO+IDbK0tL++2AwGg85+opwVCAScn0EaGXMww/
AwEmaQRsYcgznDQF+EjNgfLQIA/j4QZAAAp43YIPP7/
frxj38sv98/1Fv5m42EGaSRMQczDA8jYQZpZMwxnGbwGefUAQAOG7GvyAAAfx8IMgCA0wgyAIDTCDIAgNMI
MgCA00ZkkD399NO65pprNGbMGJWWlmrHjh1DvSVPdXW1brzxRl155ZXKy8vT17/+dbW2tib1nDp1SkuWLNG
4ceOUnZ2te+65R+3t7Uk9R48e1dy5c5WVlaW8vDwtX75cfX19l3MUT01NjXw+n5YtW+Zdc2WGY8eO6Tvf+Y
7GjRunzMxMTZ8+Xbt27fLWzUyrVq3ShAkTlJmZqbKyMh06dCjpOTo7O1VZWalAIKCcnBwtXLhQJ06cuCz7P
3PmjFauXKmioiJlZmbqi1/8on76058mvdHqcJuhoaFBd9xxh/Lz8+Xz+bRp06ak9VTtd9+
+fbr11ls1ZswYFRQUaPXq1Zdtjt7eXq1YsULTp0/XFVdcofz8fH33u9/VBx98MKzmuNC/
xbkWL14sn8+nJ554YljNIEmyEWb9+vWWkZFh//Vf/
2UHDhyw73//+5aTk2Pt7e1DvTUzMysvL7cXXnjB9u/fb3v27LF//Md/
tMLCQjtx4oTXs3jxYisoKLC6ujrbtWuX/cM//IPdfPPN3npfX59NmzbNysrKrKWlxV5//
XUbP368VVVVXfZ5duzYYddcc4195StfsaVLlzo1Q2dnp02aNMn++Z//2Zqamuz999+3N9980/77v//
b66mpqbFgMGibNm2yvXv32p133mlFRUV28uRJr2fOnDk2Y8YMe/
vtt+2tt96yL33pS1ZRUXFZZnj00Udt3LhxtnnzZjt8+LBt2LDBsrOz7cknnxy2M7z+
+uv28MMP2yuvvGKSbOPGjUnrqdhvLBazUChklZWVtn//fnv55ZctMzPTnnvuucsyR1dXl5WVldmvf/1r+
+Mf/2iNjY1200032Q033JD0HEM9x4X+Lc565ZVXbMaMGZafn2+PP/
74sJrBzGzEBdlNN91kS5Ys8T4+c+aM5efnW3V19RDu6rN1dHSYJNu2bZuZ/
fULYPTo0bZhwwav59133zVJ1tjYaGZ//
eRLS0uzaDTq9axdu9YCgYD19PRctr13d3fb5MmTrba21r72ta95QebKDCtWrLCZM2d+5noikbBwOGxr1qzx
rnV1dZnf77eXX37ZzMwOHjxokmznzp1ez5YtW8zn89mxY8cGb/P/Z+7cufa9730v6drdd99tlZWVTsxw/
jfPVO33mWeesdzc3KTPpRUrVtiUKVMuyxz92bFjh0myI0eOmNnwm+OzZvjzn/
9sEydOtP3799ukSZOSgmy4zDCifrR4+vRpNTc3q6yszLuWlpamsrIyNTY2DuHOPlssFpP0/+/
W39zcrN7e3qQZpk6dqsLCQm+GxsZGTZ8+XaFQyOspLy9XPB7XgQMHLtvelyxZorlz5ybtVXJnht/
+9rcqKSnRN7/5TeXl5am4uFi/
+MUvvPXDhw8rGo0mzREMBlVaWpo0R05OjkpKSryesrIypaWlqampadBnuPnmm1VXV6f33ntPkrR3715t375
dt99+uzMznCtV+21sbNRtt92mjIwMr6e8vFytra366KOPLtM0yWKxmHw+n3Jycrw9Dvc5EomE5s2bp+XLl+
u666771PpwmWFEBdlf/
vIXnTlzJumboySFQiFFo9Eh2tVnSyQSWrZsmW655RZNmzZNkhSNRpWRkeF9sp917gzRaLTfGc+uXQ7r16/
X7t27VV1d/ak1V2Z4//33tXbtWk2ePFlvvvmm7r33Xt1///1at25d0j4+7/
MpGo0qLy8vaT09PV1jx469LHM89NBD+va3v62pU6dq9OjRKi4u1rJly1RZWenMDOdK1X6Hw+fXuU6dOqUVK
1aooqLCe6d4F+b4+c9/rvT0dN1///39rg+XGUbsn3FxwZIlS7R//
35t3759qLdyUdra2rR06VLV1tZqzJgxQ72dv1kikVBJSYl+9rOfSZKKi4u1f/9+Pfvss5o/f/
4Q725gfvOb3+ill17Sr371K1133XXas2ePli1bpvz8fGdmGOl6e3v1rW99S2amtWvXDvV2Bqy5uVlPPvmkd
u/eLZ/
PN9Tb+Vwj6hXZ+PHjNWrUqE+djmtvb1c4HB6iXfXvvvvu0+bNm7V161ZdffXV3vVwOKzTp0+rq6srqf/
cGcLhcL8znl0bbM3Nzero6ND111+v9PR0paena9u2bXrqqaeUnp6uUCg07GeQpAkTJujaa69NuvblL39ZR4
8eTdrH530+hcNhdXR0JK339fWps7PzssyxfPly71XZ9OnTNW/ePD3wwAPeK2UXZjhXqvY7HD6/
pP8PsSNHjqi2tjbp73YN9zneeustdXR0qLCw0Ps6P3LkiB588EFdc801w2qGERVkGRkZuuGGG1RXV+ddSyQ
SqqurUyQSGcKd/
T8z03333aeNGzeqvr5eRUVFSes33HCDRo8enTRDa2urjh496s0QiUT0zjvvJH0Cnf0iOf8b82CYNWuW3nnn
He3Zs8erkpISVVZWev893GeQpFtuueVT/+vDe+
+9p0mTJkmSioqKFA6Hk+aIx+NqampKmqOrq0vNzc1eT319vRKJhEpLSwd9hk8+
+cT7I7JnjRo1SolEwpkZzpWq/
UYiETU0NKi3t9frqa2t1ZQpU5Sbm3tZZjkbYocOHdLvfvc7jRs3Lml9uM8xb9487du3L+nrPD8/
X8uXL9ebb745vGZI2bGRYWL9+vXm9/vtxRdftIMHD9qiRYssJycn6XTcULr33nstGAza73//e/
vwww+9+uSTT7yexYsXW2FhodXX19uuXbssEolYJBLx1s8eXZ89e7bt2bPH3njjDbvqqquG5Pj9WeeeWjRzY
4YdO3ZYenq6Pfroo3bo0CF76aWXLCsry375y196PTU1NZaTk2Ovvvqq7du3z+66665+j4IXFxdbU1OTbd+
+3SZPnnzZjt/Pnz/fJk6c6B2/f+WVV2z8+PH2ox/
9aNjO0N3dbS0tLdbS0mKS7LHHHrOWlhbvNF8q9tvV1WWhUMjmzZtn+/fvt/Xr11tWVlZKj3x/3hynT5+2O+
+8066++mrbs2dP0tf6uaf3hnqOC/1bnO/8U4vDYQazEXj83szsP/
7jP6ywsNAyMjLspptusrfffnuot+SR1G+98MILXs/JkyftBz/4geXm5lpWVpZ94xvfsA8//DDpef70pz/
Z7bffbpmZmTZ+/Hh78MEHrbe39zJP8//
ODzJXZnjttdds2rRp5vf7berUqfb8888nrScSCVu5cqWFQiHz+/
02a9Ysa21tTeo5fvy4VVRUWHZ2tgUCAVuwYIF1d3dflv3H43FbunSpFRYW2pgxY+wLX/
iCPfzww0nfLIfbDFu3bu33a2D+/Pkp3e/
evXtt5syZ5vf7beLEiVZTU3PZ5jh8+PBnfq1v3bp12MxxoX+L8/
UXZEM9g5kZf48MAOC0EfU7MgDA3x+CDADgNIIMAOA0ggwA4DSCDADgNIIMAOA0ggwA4DSCDADgNIIMAOA0g
gwA4DSCDADgtP8FwPeruHk+YigAAAAASUVORK5CYII="},"metadata":{}}]},
{"cell_type":"code","source":"modelo = torch.hub.load('pytorch/vision:v0.10.0',
'vgg11', pretrained=True)","metadata":{"id":"vvKON_-4z95V","colab":
{"base_uri":"https://localhost:8080/"},"executionInfo":
{"status":"ok","timestamp":1728251816833,"user_tz":300,"elapsed":9866,"user":
{"displayName":"DAVID SANTIAGO SANCHEZ
CEPEDA","userId":"09385389865282990454"}},"outputId":"518d2b12-2ff8-4919-c1c9-
887d08768f53","execution":{"iopub.status.busy":"2024-10-
07T18:36:32.812687Z","iopub.execute_input":"2024-10-
07T18:36:32.813029Z","iopub.status.idle":"2024-10-
07T18:36:34.433425Z","shell.execute_reply.started":"2024-10-
07T18:36:32.812993Z","shell.execute_reply":"2024-10-
07T18:36:34.432273Z"},"trusted":true},"execution_count":11,"outputs":
[{"name":"stderr","text":"Using cache found in
/root/.cache/torch/hub/pytorch_vision_v0.10.0\n","output_type":"stream"}]},
{"cell_type":"code","source":"#from torchsummary import summary\
n#modelresnet=resnet101(pretrained=True)\n#print(modelresnet.layer4)\
n#","metadata":{"id":"1oROOdM80KW9","colab":{"base_uri":"https://
localhost:8080/"},"executionInfo":
{"status":"ok","timestamp":1728251820038,"user_tz":300,"elapsed":3213,"user":
{"displayName":"DAVID SANTIAGO SANCHEZ
CEPEDA","userId":"09385389865282990454"}},"outputId":"8a6a8415-9e54-4597-de94-
2005ab2761b1","execution":{"iopub.status.busy":"2024-10-
07T18:36:34.436012Z","iopub.execute_input":"2024-10-
07T18:36:34.436418Z","iopub.status.idle":"2024-10-
07T18:36:34.441784Z","shell.execute_reply.started":"2024-10-
07T18:36:34.436378Z","shell.execute_reply":"2024-10-
07T18:36:34.440681Z"},"trusted":true},"execution_count":12,"outputs":[]},
{"cell_type":"code","source":"device = torch.device(\"cuda\" if
torch.cuda.is_available() else \"cpu\")\nprint(f\"Using device: {device}\")\
ntorch.set_default_device(device)","metadata":{"id":"In5aLgpMfgr5","colab":
{"base_uri":"https://localhost:8080/"},"executionInfo":
{"status":"ok","timestamp":1728255917927,"user_tz":300,"elapsed":10,"user":
{"displayName":"DAVID
SANTIAGO SANCHEZ CEPEDA","userId":"09385389865282990454"}},"outputId":"6dd1e892-
d2ce-4958-f9aa-0a93d81b6ea6","execution":{"iopub.status.busy":"2024-10-
07T18:36:34.443168Z","iopub.execute_input":"2024-10-
07T18:36:34.443571Z","iopub.status.idle":"2024-10-
07T18:36:34.508903Z","shell.execute_reply.started":"2024-10-
07T18:36:34.443536Z","shell.execute_reply":"2024-10-
07T18:36:34.507587Z"},"trusted":true},"execution_count":13,"outputs":
[{"name":"stdout","text":"Using device: cuda\n","output_type":"stream"}]},
{"cell_type":"code","source":"\nclass game():\n def __init__(self):\n
self.x=450\n\n def reset(self):\n self.x=450\n self.y=0\n self.canvas =
np.zeros((1500, 1500,3))\n self.canvas[self.y:self.y+250,self.x:self.x+250]=img\
n print(self.canvas.shape)\n #resize image\n
self.canvas=preprocess(self.canvas)\n return self.canvas,0,False,{}\n def
step(self,action):\n self.canvas=np.zeros((1500,1500,3))\n print(self.y)\n
freefall=self.y+(1/2)*9.8*(2)**2\n print(\"yfuture\")\n print(freefall)\n\n
self.y=freefall\n if self.y>1500-250:\n done=True\n return
preprocess(self.canvas),1,done,{}\n else:\n done=False\n
self.y=int(freefall)\n print()\n
self.canvas[self.y:self.y+250,self.x:self.x+250]=img\n #resize image\n
stateaf=preprocess(self.canvas)\n reward=self.reward(action)\n info={}\n
return stateaf,reward,done,info\n\n\n def reward(self,action):\n
#print((self.y,action))\n return
100/(1+torch.nn.functional.mse_loss(torch.tensor(action),torch.tensor(self.y)))","m
etadata":{"id":"q6PTQIccn5bB","execution":{"iopub.status.busy":"2024-10-
07T18:36:34.510541Z","iopub.execute_input":"2024-10-
07T18:36:34.511222Z","iopub.status.idle":"2024-10-
07T18:36:34.524182Z","shell.execute_reply.started":"2024-10-
07T18:36:34.511182Z","shell.execute_reply":"2024-10-
07T18:36:34.522957Z"},"trusted":true},"execution_count":14,"outputs":[]},
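A minimal rollout sketch (my addition, not part of the notebook), assuming the img and preprocess objects from the earlier cell are already defined; it drives game() with a constant dummy action only to show the reset/step interface:

# Hypothetical usage sketch: step the environment until the patch leaves the canvas.
env = game()
state, reward, done, info = env.reset()      # state is a preprocessed (3, 224, 224) tensor
total = 0.0
while not done:
    action = 500.0                           # dummy guess of the patch's y position, in pixels
    state, reward, done, info = env.step(action)
    total += float(reward)
print("accumulated reward:", total)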
{"cell_type":"code","source":"class ResBlock(torch.nn.Module):\n def
__init__(self,inn,outt,downsample):\n super(ResBlock,self).__init__()\n
self.conv1=torch.nn.Conv2d(inn,outt,3,padding=1)\n
self.batchnorm1=torch.nn.BatchNorm2d(outt)\n self.relu=torch.nn.ReLU()\n\n
self.downsample = downsample\n self.avgpool=torch.nn.AvgPool2d(2,2)\n if
self.downsample or inn != outt:\n self.identity_downsample =
nn.Conv2d(inn, outt, kernel_size=1, stride=1)\n self.identity_batchnorm
= nn.BatchNorm2d(outt)\n else:\n self.identity_downsample = None\n
def forward(self,x):\n residual=self.conv1(x)\n
residual=self.batchnorm1(residual)\n residual=self.relu(residual)\n identity
= x\n #print(\"identity shape\")\n #print(identity.shape)\n
#print(\"residual shape\")\n #print(residual.shape)\n if
self.identity_downsample is not None:\n identity =
self.identity_downsample(identity)\n #print(\"identity shape2\")\n
#print(identity.shape)\n identity = self.identity_batchnorm(identity)\n\n
# Add residual and identity\n output = residual + identity\n output =
self.relu(output)\n output = self.avgpool(output)\n return output\n
else:\n output = residual + identity\n output = self.relu(output)\n
output = self.avgpool(output)\n return output\n\n return output\n\n\
n","metadata":{"id":"HfF9Q9-6FTOP","execution":{"iopub.status.busy":"2024-10-
07T18:36:34.525941Z","iopub.execute_input":"2024-10-
07T18:36:34.526401Z","iopub.status.idle":"2024-10-
07T18:36:34.539323Z","shell.execute_reply.started":"2024-10-
07T18:36:34.526323Z","shell.execute_reply":"2024-10-
07T18:36:34.538234Z"},"trusted":true},"execution_count":15,"outputs":[]},
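A quick shape check (my addition, not in the notebook) showing why the actor below flattens to 12544 features: the 3x3 convolution preserves the spatial size and the 2x2 average pool halves it, so four blocks take a 224x224 input down to 14x14 with 64 channels, and 64 * 14 * 14 = 12544:

# Hypothetical shape check for the four-block stack used by the actor.
blocks = torch.nn.Sequential(
    ResBlock(3, 32, True),    # (1, 3, 224, 224) -> (1, 32, 112, 112)
    ResBlock(32, 32, True),   # -> (1, 32, 56, 56)
    ResBlock(32, 64, True),   # -> (1, 64, 28, 28)
    ResBlock(64, 64, True),   # -> (1, 64, 14, 14)
)
features = blocks(torch.randn(1, 3, 224, 224))
print(features.shape)             # torch.Size([1, 64, 14, 14])
print(features.flatten(1).shape)  # torch.Size([1, 12544]) == the in_features of actorr's fcvar layer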
{"cell_type":"code","source":"class actorr(torch.nn.Module):\n def
__init__(self,limit):\n super(actorr,self).__init__()\n
self.resBlock1=ResBlock(3,32,True)\n self.resBlock2=ResBlock(32,32,True)\n
self.resBlock3=ResBlock(32,64,True)\n self.resBlock4=ResBlock(64,64,True)\n
#self.resnet=resnet18(pretrained=False)\n
#self.vgg=torch.hub.load('pytorch/vision:v0.10.0', 'vgg11', pretrained=True)\n\n
#self.vgg=torch.nn.Sequential(*list(self.vgg.children())[:-1])\n
self.flatten=torch.nn.Flatten()\n self.fcmean=torch.nn.Linear(100,1)\n
self.fcvar=torch.nn.Linear(12544,200)\n
self.fcvar2=torch.nn.Linear(200,100)\n self.fcvar3=torch.nn.Linear(100,1)\n
#self.instancenorm3=torch.nn.RMSNorm((1,200),elementwise_affine=True)\n
#self.instancenorm2=torch.nn.RMSNorm((1,100),elementwise_affine=True)\n
#self.instancenorm1=torch.nn.RMSNorm((1,200),elementwise_affine=True)\n
#self.groupnorm1=torch.nn.GroupNorm(num_groups=32,num_channels=512)\n
self.selu1=torch.nn.SELU()\n self.selu2=torch.nn.SELU()\n
self.selu3=torch.nn.SELU()\n self.sigmoid=torch.nn.Sigmoid()\n
self.limit=limit\n #initialize fc\n
#torch.nn.init.xavier_uniform_(self.fcmean.weight)\n
#torch.nn.init.xavier_uniform_(self.fcvar.weight)\n
#torch.nn.init.xavier_uniform_(self.fcvar2.weight)\n
#torch.nn.init.xavier_uniform_(self.fcvar3.weight)\n #remove last fc layer
withou\n #self.resnet.fc=torch.nn.Identity()\n
#self.vgg.classifier=torch.nn.Identity()\n\n\n #for param in
self.resnet.parameters():\n #param.requires_grad = False\n
#unfreeze with a probability\n #for param in self.resnet.parameters():\n
#delta=random.random()\n #if delta<self.limit:\n
#param.requires_grad = True\n #self.resnet.layer4.requires_grad_(True)\n
#self.resnet.layer3.requires_grad_(True)\n
#self.resnet.layer2.requires_grad_(True)\n
#self.resnet.layer1.requires_grad_(True)\n #for param in
self.vgg.parameters():\n #param.requires_grad = False\n for name,
param in self.named_parameters():\n print(f\"Parameter: {name} |
requires_grad: {param.requires_grad}\")\n\n\n\n def forward(self,x):\n
x=self.resBlock1(x)\n x=self.resBlock2(x)\n x=self.resBlock3(x)\n
x=self.resBlock4(x)\n #x=self.vgg(x)\n #x=self.groupnorm1(x)\n
#print(\"ex sheip\")\n #print(x.shape)\n x=self.flatten(x)\n\n\n
x=self.fcvar(x)\n #x=self.instancenorm1(x)\n #print(\"shapefif\")\n
#print(x.shape)\n #dinamic instance\n #x=torch.reshape(x,(-1,1,200))\
n #tensorrr=torch.Tensor([])\n #for i in x:\n
#print(i.shape)\n\n\n #i=self.instancenorm1(i)\n #concat\n
#tensorrr=torch.cat((tensorrr,i),0)\n #x=tensorrr\n\n
#x=torch.reshape(x,(-1,200))\n #x=self.selu1(x)\n x=torch.nn.SELU()
(x)\n\n #x=torch.nn.Dropout(0.5)(x)\n\n #x=tensorr\n
x=self.fcvar2(x)\n #x=torch.nn.InstanceNorm1d(x.shape[0])(x)\n
#x=self.selu2(x)\n x=torch.nn.SELU()(x)\n\n #x=torch.nn.Dropout(0.5)
(x)\n #x=self.instancenorm2(x)\n #x=self.selu2(x)\n
variance=self.fcvar3(x)\n mean=self.fcmean(x)\n
x=torch.cat((mean,variance),1)\n\n #concat the mean and variance\n
#x=self.instancenorm3(x)\n #x=self.sigmoid(x)\n\n\n\n
x=torch.nn.Sigmoid()(x)\n #x=torch.nn.ReLU6()(x)\n\n\n\n\n\n return
x\n def act(self,x,eps,deterministic):\n #print(\"input sheip\")\n
#print(x.shape)\n\n x=self.forward(x)\n #print(\"mean and std\")\n\n\
n #static variance\n variance=x[:,1]+1e-5\n
#variance=torch.ones(x.shape[0])\n variance=torch.clamp(variance,min=1e-
3,max=1.0*eps)\n variance=torch.reshape(variance,(-1,1))\n\n
#print(x[:,0])\n #print(x[:,1])\n #clampvar\n\n
cov=torch.diag(variance)\n dist=torch.distributions.Normal(x[:,0],variance)\
n if deterministic:\n action=x[:,0]*1500\n return
action,0,0,action\n action=dist.rsample()\n
action=torch.reshape(action,(-1,1))\n #print(\"action\")\n
#print(action)\n\n\n log_prob=dist.log_prob(action)\n
log_prob=log_prob\n actionunscaled=action\n action=action*1500\n
#print(\"log prob\")\n #print(log_prob)\n entropy=dist.entropy()\n
entropy=torch.mean(entropy)\n #print(\"entropy\")\n #print(entropy)\n
log_prob=torch.reshape(log_prob,(-1,1))\n if deterministic:\n
print(\"deterministaaaa\")\n action=x[:,0]*1500\n
print(\"action determinista\")\n print(action)\n\n\n return
action,log_prob,entropy,actionunscaled\n def
actonlogprob(self,x,eps,deterministic,actions):\n meaandavg=torch.Tensor([])\n
#print(\"input sheip\")\n #print(x.shape)\n for i in x:\n
i=torch.unsqueeze(i,0)\n\n i=self.forward(i)\n #print(\"actionss\")\n
#print(actions)\n meaandavg=torch.cat((meaandavg,i),0)\n x=meaandavg\n
#static variance\n variance=x[:,1]+1e-5\n #x=self.forward(x)\n
#variance=torch.ones(x.shape[0])\n variance=torch.clamp(variance,min=1e-
3,max=1.0*eps)\n dist=torch.distributions.Normal(x[:,0],variance)\n
logprobs=dist.log_prob(actions)\n logprobs=logprobs\n
entropy=dist.entropy()\n entropy=torch.mean(entropy)\n
#print(\"logprobs\")\n #print(logprobs)\n #print(\"entropy\")\n
#print(entropy)\n #print(\"mean and std\")\n #print(x[:,0])\n
#print(x[:,1])\n graph=plt.plot(x[:,0].cpu().detach())\n #save\n
#remove graph\n plt.savefig('graphmean.png')\n plt.clf()\n
#plt.show()\n graph=plt.plot(x[:,1].cpu().detach())\n #save\n
plt.savefig('graphstd.png')\n
plt.clf()\n #plt.show()\n\n\n\n return logprobs,entropy\n\nclass
qfunction(torch.nn.Module):\n def __init__(self):\n
super(qfunction,self).__init__()\n self.resnet=resnet18(pretrained=True)\n
self.vgg=torch.hub.load('pytorch/vision:v0.10.0', 'vgg11', pretrained=True)\n
self.vgg=torch.nn.Sequential(*list(self.vgg.children())[:-1])\n
self.resnet=torch.nn.Sequential(*list(self.resnet.children())[:-1])\n
self.flatten=torch.nn.Flatten()\n self.fc=torch.nn.Linear(512,100)\n
self.fc1=torch.nn.Linear(100,10)\n self.fc2=torch.nn.Linear(10,1)\n\n\n
self.conv1daction=torch.nn.Linear(1,32)\n
self.batchnorm=torch.nn.InstanceNorm1d(32)\n self.selu=torch.nn.SELU()\n
self.conv2daction=torch.nn.Linear(32,32)\n
self.batchnorm2=torch.nn.InstanceNorm1d(32)\n
self.convidentityaction=torch.nn.Linear(1,32)\n #initialize fc\n
torch.nn.init.xavier_uniform_(self.fc.weight)\n def forward(self,x):\n
x=self.resnet(x)\n #x=self.vgg(x)\n x=self.flatten(x)\n
x=self.fc(x)\n x=torch.nn.SELU()(x)\n x=self.fc1(x)\n
x=torch.nn.SELU()(x)\n x=self.fc2(x)\n return x","metadata":
{"id":"dcPiRY5ZnM6F","execution":{"iopub.status.busy":"2024-10-
07T18:36:34.540823Z","iopub.execute_input":"2024-10-
07T18:36:34.541299Z","iopub.status.idle":"2024-10-
07T18:36:34.578077Z","shell.execute_reply.started":"2024-10-
07T18:36:34.541259Z","shell.execute_reply":"2024-10-
07T18:36:34.576982Z"},"trusted":true},"execution_count":16,"outputs":[]},
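A small smoke-test sketch (my addition, using a hypothetical dummy input) of how the two networks are queried. In the rollout loop below, act() is only ever called with a batch of one state, and the sampled action is rescaled by 1500 to lie on the canvas:

# Hypothetical smoke test for the actor and critic on one dummy state.
dummy_state = torch.randn(1, 3, 224, 224)

policy = actorr(limit=0.1)
action, log_prob, entropy, action_unscaled = policy.act(dummy_state, eps=1.0, deterministic=False)
print(action.shape, log_prob.shape)   # torch.Size([1, 1]) torch.Size([1, 1])

critic = qfunction()                  # downloads resnet18 / vgg11 weights on first use
value = critic(dummy_state)
print(value.shape)                    # torch.Size([1, 1])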
{"cell_type":"markdown","source":"","metadata":{"id":"Rgksfug-Ntfx"}},
{"cell_type":"code","source":"from collections import deque","metadata":{"id":"-
wDlx0E6ySAf","execution":{"iopub.status.busy":"2024-10-
07T18:36:34.581090Z","iopub.execute_input":"2024-10-
07T18:36:34.581542Z","iopub.status.idle":"2024-10-
07T18:36:34.592623Z","shell.execute_reply.started":"2024-10-
07T18:36:34.581498Z","shell.execute_reply":"2024-10-
07T18:36:34.591314Z"},"trusted":true},"execution_count":17,"outputs":[]},
{"cell_type":"markdown","source":"","metadata":{}},
{"cell_type":"code","source":"class train():\n def
__init__(self,env,t,gamma,actor,critic,epochs,initial_eps,decay,deterministic,lambd
ad,epsilon,limit):\n self.epsilon=epsilon\n self.env=env\n self.t=t\n
self.gamma=gamma\n self.policy_net=actor(limit)\n self.critic_net=critic()\n
self.optimizeractor=torch.optim.Adam(self.policy_net.parameters(),lr=0.00006,weight
_decay=0.4)\n
self.optimizercritic=torch.optim.Adam(self.critic_net.parameters(),lr=0.004,weight_
decay=0.2)\n #clip grad\n
#torch.nn.utils.clip_grad_value_(self.policy_net.parameters(), clip_value=1.0)\n
#torch.nn.utils.clip_grad_value_(self.critic_net.parameters(), clip_value=1.0)\n
self.epochs=epochs\n self.acc_rewards=[]\n
#self.policy_net.resnet.requires_grad_(False)\n
self.critic_net.resnet.requires_grad_(False)\n
#self.policy_net.vgg.requires_grad_(False)\n
self.critic_net.vgg.requires_grad_(False)\n\n self.eps=initial_eps\n
self.decay=decay\n self.avgs=[]\n self.deterministic=deterministic\n
self.lmbda=lambdad\n\n def millonariosonacional(self,states):\n
al=random.random()\n if al<self.eps:\n print(\"millonarios\")\n
#create batch of random variables\n meaandesv = torch.rand((states.shape[0],
2))\n mean = meaandesv[:, 0]\n stddev = torch.clamp(meaandesv[:, 1],
min=1e-3, max=1.0)\n cov_matrix = torch.diag(stddev**2)\n
dist=torch.distributions.MultivariateNormal(mean,cov_matrix)\n
action=dist.rsample()\n log_prob=dist.log_prob(action)\n\n
action=action*1500\n\n print(action,log_prob)\n return action,log_prob\n\
n else:\n print(\"nacional\")\n
action,lobg_prob=self.policy_net.act(states)\n return action,lobg_prob\n\n\n
def runtimesteps(self,t):\n ts=t\n initial_arr=deque(maxlen=ts+100)\n
rewards=deque(maxlen=ts+100)\n dones=deque(maxlen=ts+100)\n
rotwarr=deque(maxlen=ts+100)\n actions=deque(maxlen=ts+100)\n
log_probs=deque(maxlen=ts+100)\n advantages=torch.Tensor([])\n
gaereturnss=torch.Tensor([])\n\n\n\n while t>=0:\n
rotwsub=deque(maxlen=ts+100)\n initial_intern=deque(maxlen=ts+100)\n
dones_intern=deque(maxlen=ts+100)\n
initial_state,reward,done,info=self.env.reset()\n acc_reward=0\n
trayectories=0\n\n\n while not done:\n #print(\"la forma\")\n
#print(initial_state.shape)\n
action,log_prob,_,actionunscaled=self.policy_net.act(initial_state.unsqueeze(0).to(
\"cuda\"),self.eps,self.deterministic)\n action=action\n
actions.append(actionunscaled)\n log_prob=log_prob\n
#action=torch.sigmoid(action)\n #action=action*1500\n
state,reward,done,info=self.env.step(action)\n
initial_intern.append(initial_state.detach())\n\n\n #print(reward)\n
rewards.append(reward)\n dones_intern.append(done)\n
log_probs.append(log_prob.detach())\n rotwsub.append(reward)\n t-=1\n
initial_state=state\n acc_reward+=reward\n
#advantages=advantages+self.gae(initial_intern,rotwsub,dones_intern)\n
gaeadvantages,gaereturns=self.gae(initial_intern,rotwsub,dones_intern)\n
advantages=torch.cat((advantages,gaeadvantages),0)\n
gaereturnss=torch.cat((gaereturnss,gaereturns),0)\n dones=dones+dones_intern\n
initial_arr=initial_arr+initial_intern\n self.acc_rewards.append(acc_reward)\n
trayectories+=1\n\n if len(self.acc_rewards)>=5:\n\n
batchacc=self.acc_rewards[-5:]\n avg=sum(batchacc)/len(batchacc)\n
self.avgs.append(avg)\n print(avg)\n #plt.plot(self.avgs)\n
#plt.show()\n\n rotwarr+=self.rewontheway(rotwsub)\n return
initial_arr,rewards,actions,dones,rotwarr,log_probs,advantages,gaereturnss,trayecto
ries\n def rewontheway(self,rotwsub):\n rotw=torch.Tensor([0])\n
output=deque(maxlen=len(rotwsub))\n print(rotwsub)\n rotwsub.reverse()\n
print(rotwsub)\n for z,i in enumerate(rotwsub):\n
rotw=i+rotw.clone()*self.gamma**(len(rotwsub)-z)\n\n output.insert(0,rotw)\n
return output\n def train(self):\n
initial_states,rewards,actions,dones,rotwar,log_pro,advantages,gaereturnss,trayecto
ries=self.runtimesteps(self.t)\n
tensorin=torch.stack(tuple(initial_states)).to(\"cuda\")\n
rotwarr=torch.stack(tuple(rotwar)).to(\"cuda\")\n
log_probs=torch.Tensor(log_pro).to(\"cuda\")\n #print(\"log_probs1\")\n
#print(log_probs)\n log_probs=torch.reshape(log_probs,(-1,1))\n
#print(\"log_probs2\")\n #print(log_probs)\n
actions=torch.Tensor(actions).to(\"cuda\")\n tensorin=tensorin\n\n\n\n for i
in range(self.epochs):\n clear_output()\n\n #1\n
criticeval=self.critic_net(tensorin)\n advantagee=advantages\n #2\n
#advantagee=((advantage-advantage.mean())/(advantage.std()+1e-5))\n\n\n
logprob,entropy=self.policy_net.actonlogprob(tensorin,self.eps,self.deterministic,a
ctions)\n logprob=torch.reshape(logprob,(-1,1))\n #print(\"logprob\")\n
#print(logprob)\n #print(\"log_probs\")\n #print(log_probs)\n
#action=torch.sigmoid(action)\n #action=action*1500\n
ratio=torch.exp(logprob-log_probs)\n #print(\"ratio\")\n
#print(ratio.shape)\n #ratio=torch.clamp(ratio,1-self.epsilon,1+self.epsilon)\
n #print(\"ratio\")\n #print(ratio)\n #print(\"advantagee\")\n
#print(advantagee)\n surr1=ratio*advantagee\n surr2=(torch.clamp(ratio,1-
self.epsilon,1+self.epsilon)*advantagee)\n loss1=(-
torch.min(surr1,surr2).sum())/len(initial_states)\n #print(\"loss1\")\n
#print(loss1)\n #print(\"lossss 1\")\n #print(loss1)\n #print
gradients\n\n loss2=F.mse_loss(criticeval,gaereturnss)\n
#print(\"loss2\")\n #print(loss2)\n #print(\"entrphyss\")\n
#print(entropy)\n\n\n\n finalloss=1*loss1+0.5*loss2-0.1*entropy\n
#print(\"finalloss\")\n #print(finalloss)\n
self.optimizeractor.zero_grad()\n self.optimizercritic.zero_grad()\n
finalloss.backward(retain_graph=True)\n #clip grad norm\n
torch.nn.utils.clip_grad_norm_(self.policy_net.parameters(), max_norm=1.0)\n
torch.nn.utils.clip_grad_norm_(self.critic_net.parameters(), max_norm=10.0)\n
self.optimizeractor.step()\n self.optimizercritic.step()\n print(\"Actor
Gradients:\")\n for name, param in self.policy_net.named_parameters():\n
if param.grad is not None:\n print(f\"Parameter: {name} | Grad Norm:
{param.grad.norm()}\")\n print(\"Critic Gradients:\")\n for name, param
in self.critic_net.named_parameters():\n if param.grad is not None:\n
print(f\"Parameter: {name} | Grad Norm: {param.grad.norm()}\")\n
self.eps=self.eps*self.decay\n print(\"epsilon\")\n print(self.eps)\n if
self.eps<0.01:\n self.eps=0.01\n\n
print({\"loss1\":loss1,\"loss2\":loss2,\"actions\":actions})\n def
gae(self,states,rewards,done):\n returns = deque(maxlen=300)\n gae = 0\n
print(\"zs\")\n states=torch.stack(tuple(states))\n print(rewards)\n
print(done)\n values=self.critic_net(states.to(\"cuda\")).detach()\n\n
#concat 0 tensor\n values=torch.cat((values,torch.Tensor([[0]])),0)\n
print(values.shape)\n for i in reversed(range(len(rewards))):\n delta =
rewards[i] + self.gamma * values[i + 1,0] - values[i,0]\n gae = delta +
self.gamma * self.lmbda * gae\n returns.insert(0, gae.detach() +
values[i].detach())\n\n adv = returns\n adv = torch.stack(tuple(adv))\n
#print(\"adv\")\n #print(adv)\n gaereturns=adv-values[:-1]\n
#print(\"gaereturns\")\n #print(gaereturns)\n
return ((adv - adv.mean()) / (adv.std() + 1e-10)),gaereturns\n\n\n","metadata":
{"id":"0aenyK7Nfn_0","execution":{"iopub.status.busy":"2024-10-
07T18:36:34.595033Z","iopub.execute_input":"2024-10-
07T18:36:34.595475Z","iopub.status.idle":"2024-10-
07T18:36:34.639468Z","shell.execute_reply.started":"2024-10-
07T18:36:34.595423Z","shell.execute_reply":"2024-10-
07T18:36:34.638354Z"},"trusted":true},"execution_count":18,"outputs":[]},
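As a reference note (my summary, not text from the notebook): train() and gae() follow the standard clipped-surrogate PPO update with generalized advantage estimation. Writing the probability ratio as rho_t, the clip range self.epsilon as epsilon, and using the code's weights of 0.5 on the value loss and 0.1 on the entropy bonus, the intended objective and advantage recursion are

\mathcal{L} = -\frac{1}{N}\sum_t \min\!\big(\rho_t A_t,\ \operatorname{clip}(\rho_t, 1-\epsilon, 1+\epsilon)\,A_t\big) + 0.5\,\big(V(s_t)-\hat{R}_t\big)^2 - 0.1\,\mathcal{H}[\pi],
\qquad \rho_t = \exp\big(\log\pi(a_t\mid s_t)-\log\pi_{\text{old}}(a_t\mid s_t)\big),

\delta_t = r_t + \gamma V(s_{t+1}) - V(s_t), \qquad A_t = \sum_{l\ge 0} (\gamma\lambda)^l\,\delta_{t+l}.

In the code as written, adv inside gae() holds the return estimates (GAE advantage plus V(s_t)); its normalized form is what train() uses as advantagee, while gaereturns = adv - V(s_t) is the quantity the critic is regressed onto in loss2.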
{"cell_type":"code","source":"","metadata":
{"id":"0dSCB7hMUUZo"},"execution_count":null,"outputs":[]},
{"cell_type":"code","source":"","metadata":
{"id":"UGYCDSpojUUY"},"execution_count":null,"outputs":[]},
{"cell_type":"code","source":"","metadata":
{"id":"cprNzShn5W_n"},"execution_count":null,"outputs":[]},
{"cell_type":"code","source":"","metadata":{"id":"1ZiAo4h78ZfA","colab":
{"base_uri":"https://localhost:8080/","height":141},"executionInfo":
{"status":"error","timestamp":1728246724522,"user_tz":300,"elapsed":389,"user":
{"displayName":"DAVID SANTIAGO SANCHEZ
CEPEDA","userId":"09385389865282990454"}},"outputId":"b918d089-35b4-45bf-986f-
26b73d285b6f","trusted":true},"execution_count":null,"outputs":[]},
{"cell_type":"code","source":"\n\
ntorch.set_default_tensor_type('torch.cuda.FloatTensor')\n\nfrom IPython.display
import clear_output\n\n\n\n\n\n\ntorch.autograd.set_detect_anomaly(True)\n\
ntrains=train(game(),100,0.99,actorr,qfunction,5,1,0.999,False,0.95,0.5,0.1)\
npipeline=400\nfor i in range(pipeline):\n clear_output(wait=True)\n
trains.train()","metadata":{"colab":{"base_uri":"https://localhost:8080/","height":
1000},"id":"UzDSDHvto1D-","outputId":"c1dc4155-da81-4ccd-aa66-
e082ff93a701","executionInfo":
{"status":"ok","timestamp":1728256378056,"user_tz":300,"elapsed":440455,"user":
{"displayName":"DAVID SANTIAGO SANCHEZ
CEPEDA","userId":"09385389865282990454"}},"execution":{"iopub.status.busy":"2024-
10-07T18:36:34.640833Z","iopub.execute_input":"2024-10-
07T18:36:34.641223Z","iopub.status.idle":"2024-10-
07T19:48:30.109874Z","shell.execute_reply.started":"2024-10-
07T18:36:34.641185Z","shell.execute_reply":"2024-10-
07T19:48:30.108102Z"},"trusted":true},"execution_count":19,"outputs":[{"traceback":
["\
u001b[0;31m------------------------------------------------------------------------
---\u001b[0m","\u001b[0;31mKeyboardInterrupt\u001b[0m
Traceback (most recent call last)","Cell \u001b[0;32mIn[19], line 16\u001b[0m\n\
u001b[1;32m 14\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m i \
u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mrange\u001b[39m(pipeline):\n\
u001b[1;32m 15\u001b[0m clear_output(wait\u001b[38;5;241m=\u001b[39m\
u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[0;32m---> 16\u001b[0m \
u001b[43mtrains\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtrain\
u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n","Cell \u001b[0;32mIn[18],
line 150\u001b[0m, in \u001b[0;36mtrain.train\u001b[0;34m(self)\u001b[0m\n\
u001b[1;32m 145\u001b[0m advantagee\u001b[38;5;241m=\u001b[39madvantages\n\
u001b[1;32m 146\u001b[0m \u001b[38;5;66;03m#2\u001b[39;00m\n\u001b[1;32m 147\
u001b[0m
\u001b[38;5;66;03m#advantagee=((advantage-advantage.mean())/(advantage.std()+1e-
5))\u001b[39;00m\n\u001b[0;32m--> 150\u001b[0m logprob,entropy\u001b[38;5;241m=\
u001b[39m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\
u001b[43mpolicy_net\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\
u001b[43mactonlogprob\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtensorin\u001b[49m\
u001b[43m,\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\
u001b[39;49m\u001b[43meps\u001b[49m\u001b[43m,\u001b[49m\u001b[38;5;28;43mself\
u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdeterministic\u001b[49m\
u001b[43m,\u001b[49m\u001b[43mactions\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m
151\u001b[0m logprob\u001b[38;5;241m=\u001b[39mtorch\u001b[38;5;241m.\
u001b[39mreshape(logprob,(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m,\
u001b[38;5;241m1\u001b[39m))\n\u001b[1;32m 152\u001b[0m \
u001b[38;5;66;03m#print(\"logprob\")\u001b[39;00m\n\u001b[1;32m 153\u001b[0m \
u001b[38;5;66;03m#print(logprob)\u001b[39;00m\n\u001b[1;32m 154\u001b[0m \
u001b[38;5;66;03m#print(\"log_probs\")\u001b[39;00m\n\u001b[1;32m 155\u001b[0m \
u001b[38;5;66;03m#print(log_probs)\u001b[39;00m\n\u001b[1;32m 156\u001b[0m \
u001b[38;5;66;03m#action=torch.sigmoid(action)\u001b[39;00m\n\u001b[1;32m 157\
u001b[0m \u001b[38;5;66;03m#action=action*1500\u001b[39;00m\n","Cell \
u001b[0;32mIn[16], line 170\u001b[0m, in \u001b[0;36mactorr.actonlogprob\
u001b[0;34m(self, x, eps, deterministic, actions)\u001b[0m\n\u001b[1;32m 167\
u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m i \u001b[38;5;129;01min\u001b[39;00m
x:\n\u001b[1;32m 168\u001b[0m i\u001b[38;5;241m=\u001b[39mtorch\
u001b[38;5;241m.\u001b[39munsqueeze(i,\u001b[38;5;241m0\u001b[39m)\n\u001b[0;32m-->
170\u001b[0m i\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;43mself\u001b[39;49m\
u001b[38;5;241;43m.\u001b[39;49m\u001b[43mforward\u001b[49m\u001b[43m(\u001b[49m\
u001b[43mi\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 171\u001b[0m \
u001b[38;5;66;03m#print(\"actionss\")\u001b[39;00m\n\u001b[1;32m 172\
u001b[0m \u001b[38;5;66;03m#print(actions)\u001b[39;00m\n\u001b[1;32m 173\
u001b[0m meaandavg\u001b[38;5;241m=\u001b[39mtorch\u001b[38;5;241m.\
u001b[39mcat((meaandavg,i),\u001b[38;5;241m0\u001b[39m)\n","Cell \
u001b[0;32mIn[16], line 57\u001b[0m, in \u001b[0;36mactorr.forward\
u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m 55\u001b[0m x\u001b[38;5;241m=\
u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mresBlock1(x)\n\
u001b[1;32m 56\u001b[0m x\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\
u001b[39m\u001b[38;5;241m.\u001b[39mresBlock2(x)\n\u001b[0;32m---> 57\u001b[0m x\
u001b[38;5;241m=\u001b[39m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\
u001b[39;49m\u001b[43mresBlock3\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx\
u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 58\u001b[0m x\u001b[38;5;241m=\
u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mresBlock4(x)\n\
u001b[1;32m 59\u001b[0m \u001b[38;5;66;03m#x=self.vgg(x)\u001b[39;00m\n\
u001b[1;32m 60\u001b[0m \u001b[38;5;66;03m#x=self.groupnorm1(x)\u001b[39;00m\n\
u001b[1;32m 61\u001b[0m \u001b[38;5;66;03m#print(\"ex sheip\")\u001b[39;00m\n\
u001b[1;32m 62\u001b[0m \u001b[38;5;66;03m#print(x.shape)\u001b[39;00m\n","File
\u001b[0;32m/opt/conda/lib/python3.10/site-packages/torch/nn/modules/
module.py:1553\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self,
*args, **kwargs)\u001b[0m\n\u001b[1;32m 1551\u001b[0m \
u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\
u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \
u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m#
type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1552\u001b[0m \
u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1553\u001b[0m \
u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\
u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\
u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\
u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\
u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\
n","File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/torch/nn/modules/
module.py:1562\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args,
**kwargs)\u001b[0m\n\u001b[1;32m 1557\u001b[0m \u001b[38;5;66;03m# If we don't
have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m
1558\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\
u001b[39;00m\n\u001b[1;32m 1559\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \
u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\
u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\
u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\
u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks
\u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\
u001b[39m_forward_pre_hooks\n\u001b[1;32m 1560\u001b[0m \
u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\
u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1561\u001b[0m \
u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\
u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1562\u001b[0m \
u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\
u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\
u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\
u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\
u001b[1;32m 1564\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m
1565\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\
u001b[39;00m\n","Cell \u001b[0;32mIn[15], line 32\u001b[0m, in \
u001b[0;36mResBlock.forward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m 30\
u001b[0m \u001b[38;5;66;03m# Add residual and identity\u001b[39;00m\n\u001b[1;32m
31\u001b[0m output \u001b[38;5;241m=\u001b[39m residual \u001b[38;5;241m+\u001b[39m
identity\n\u001b[0;32m---> 32\u001b[0m output \u001b[38;5;241m=\u001b[39m \
u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrelu\
u001b[49m\u001b[43m(\u001b[49m\u001b[43moutput\u001b[49m\u001b[43m)\u001b[49m\n\
u001b[1;32m 33\u001b[0m output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\
u001b[39m\u001b[38;5;241m.\u001b[39mavgpool(output)\n\u001b[1;32m 34\u001b[0m \
u001b[38;5;28;01mreturn\u001b[39;00m output\n","File
\u001b[0;32m/opt/conda/lib/python3.10/site-packages/torch/nn/modules/
module.py:1553\u001b[0m,
in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\
u001b[0m\n\u001b[1;32m 1551\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \
u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\
u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\
u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m
1552\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1553\u001b[0m
\u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\
u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\
u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\
u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\
u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\
n","File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/torch/nn/modules/
module.py:1562\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args,
**kwargs)\u001b[0m\n\u001b[1;32m 1557\u001b[0m \u001b[38;5;66;03m# If we don't
have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m
1558\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\
u001b[39;00m\n\u001b[1;32m 1559\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \
u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\
u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\
u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\
u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks
\u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\
u001b[39m_forward_pre_hooks\n\u001b[1;32m 1560\u001b[0m \
u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\
u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1561\u001b[0m \
u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\
u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1562\u001b[0m \
u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\
u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\
u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\
u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\
u001b[1;32m 1564\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m
1565\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\
u001b[39;00m\n","File
\u001b[0;32m/opt/conda/lib/python3.10/site-packages/torch/nn/modules/
activation.py:104\u001b[0m, in \u001b[0;36mReLU.forward\u001b[0;34m(self, input)\
u001b[0m\n\u001b[1;32m 103\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \
u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \
u001b[38;5;28minput\u001b[39m: Tensor) \u001b[38;5;241m-\u001b[39m\
u001b[38;5;241m>\u001b[39m Tensor:\n\u001b[0;32m--> 104\u001b[0m \
u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[38;5;241;43m.\
u001b[39;49m\u001b[43mrelu\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\
u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minplace\u001b[49m\
u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\
u001b[38;5;241;43m.\u001b[39;49m\u001b[43minplace\u001b[49m\u001b[43m)\u001b[49m\
n","File
\u001b[0;32m/opt/conda/lib/python3.10/site-packages/torch/nn/functional.py:1496\
u001b[0m, in \u001b[0;36mrelu\u001b[0;34m(input, inplace)\u001b[0m\n\u001b[1;32m
1490\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124mr\u001b[39m\
u001b[38;5;124;03m\"\"\"relu(input, inplace=False) -> Tensor\u001b[39;00m\n\
u001b[1;32m 1491\u001b[0m \n\u001b[1;32m 1492\u001b[0m \
u001b[38;5;124;03mApplies the rectified linear unit function element-wise. See\
u001b[39;00m\n\u001b[1;32m 1493\u001b[0m \
u001b[38;5;124;03m:class:`~torch.nn.ReLU` for more details.\u001b[39;00m\n\
u001b[1;32m 1494\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m
1495\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m has_torch_function_unary(\
u001b[38;5;28minput\u001b[39m):\n\u001b[0;32m-> 1496\u001b[0m \
u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mhandle_torch_function\u001b[49m\
u001b[43m(\u001b[49m\u001b[43mrelu\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \
u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\
u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\
u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\
u001b[43minplace\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minplace\
\n   1497     if inplace:\n   1498         result = torch.relu_(input)\n","File /opt/conda/lib/python3.10/site-packages/torch/overrides.py:1630, in handle_torch_function(public_api, relevant_args, *args, **kwargs)\n-> 1630     result = mode.__torch_function__(public_api, types, args, kwargs)\n   1631 if result is not NotImplemented:\n   1632     return result\n","File /opt/conda/lib/python3.10/site-packages/torch/utils/_device.py:79, in DeviceContext.__torch_function__(self, func, types, args, kwargs)\n   77 if func in _device_constructors() and kwargs.get('device') is None:\n   78     kwargs['device'] = self.device\n-> 79 return func(*args, **kwargs)\n","File /opt/conda/lib/python3.10/site-packages/torch/nn/functional.py:1500, in relu(input, inplace)\n   1498     result = torch.relu_(input)\n   1499 else:\n-> 1500     result = torch.relu(input)\n   1501 return result\n","File /opt/conda/lib/python3.10/site-packages/torch/fx/traceback.py:72, in format_stack()\n   70 else:\n   71     # fallback to traceback.format_stack()\n-> 72     return traceback.format_list(traceback.extract_stack()[:-1])\n","File /opt/conda/lib/python3.10/traceback.py:227, in extract_stack(f, limit)\n   225 if f is None:\n   226     f = sys._getframe().f_back\n-> 227 stack = StackSummary.extract(walk_stack(f), limit=limit)\n   228 stack.reverse()\n   229 return stack\n","File /opt/conda/lib/python3.10/traceback.py:376, in StackSummary.extract(klass, frame_gen, limit, lookup_lines, capture_locals)\n   374 else:\n   375     f_locals = None\n-> 376 result.append(FrameSummary(\n   377     filename, lineno, name, lookup_line=False, locals=f_locals))\n   378 for filename in fnames:\n   379     linecache.checkcache(filename)\n","KeyboardInterrupt: 
"],"ename":"KeyboardInterrupt","evalue":"","output_type":"error"},
{"output_type":"display_data","data":{"text/plain":"<Figure size 640x480 with 0
Axes>"},"metadata":{}}]},{"cell_type":"code","source":"","metadata":
{"id":"2TPoufxnnxCD"},"execution_count":null,"outputs":[]},
{"cell_type":"markdown","source":"{-pOq,r->(sY-t),-s} imp -rYq\n\n* List item\n\
n* List item\n* List item\n\n\n* List item\n\n\n{-( ( (-pOq) Y (r->(sY-t)) Y
(-s) ) -> (-rYq)) }\n{( (-pOq) Y (r->(sY-t)) Y (-s) ) Y -(-rYq) }\n{( (-pOq) Y (r-
>(sY-t)) Y (-s) ) , -(-rYq) }\n{ (-pOq) , (r->(sY-t)) , (-s) , (rO-q) }\n{ (-
pOq) , (-rO(sY-t)) , (-s) , (rO-q) }\n{ (-pOq) , -r , (-s) , (rO-q) } { (-pOq) ,
(sY-t) , (-s) , (rO-q) }\n { (-pOq) , -r , (-s) , r } { (-pOq) , -r , (-s) ,-
q }\n\n\n\n { (-p) , -r , (-s) ,-q } { (-pOq) , s,-t
, (-s) , (rO-q) }\n x\n\
n","metadata":{"id":"4kXPEUA1QyWH"}},{"cell_type":"code","source":"pipeline=1000\
ntrains.deterministic=True\nprint(trains.deterministic)\nfor i in range(pipeline):\
n
trains.train()","metadata":{"colab":{"base_uri":"https://localhost:8080/","height":
1000},"executionInfo":
{"elapsed":77237,"status":"error","timestamp":1728081585552,"user":
{"displayName":"DAVID SANTIAGO SANCHEZ
CEPEDA","userId":"09385389865282990454"},"user_tz":300},"id":"BH7a6onBkpZX","output
Id":"f1916970-c3df-434d-ff0d-d71abf109a99","execution":{"iopub.status.busy":"2024-
10-07T19:48:30.111095Z","iopub.status.idle":"2024-10-
07T19:48:30.111669Z","shell.execute_reply.started":"2024-10-
07T19:48:30.111371Z","shell.execute_reply":"2024-10-
07T19:48:30.111402Z"},"trusted":true},"execution_count":null,"outputs":[]},
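{"cell_type":"markdown","source":"The `deterministic` flag above belongs to the `trains` object defined earlier in this notebook, and its implementation is not shown here. The next cell is only a minimal, generic sketch of how a reproducible PyTorch run is usually configured (seeding Python, NumPy and torch, plus the cuDNN flags); the `set_seed` helper and the seed value 0 are illustrative assumptions, not part of the original trainer.","metadata":{}},{"cell_type":"code","source":"#Minimal reproducibility sketch (assumption: not the trainer's actual code)\nimport random\nimport numpy as np\nimport torch\n\ndef set_seed(seed=0):\n    #seed every RNG the training loop might touch\n    random.seed(seed)\n    np.random.seed(seed)\n    torch.manual_seed(seed)\n    torch.cuda.manual_seed_all(seed)\n    #ask cuDNN for deterministic kernels (usually slower, but repeatable)\n    torch.backends.cudnn.deterministic = True\n    torch.backends.cudnn.benchmark = False\n\nset_seed(0)","metadata":{},"execution_count":null,"outputs":[]},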
{"cell_type":"code","source":"#get all asci code\nimport string\n\
nascilist=list(\"abcdefghijkl\")\n\nimage=\"eldiablo.png\"\
nimgtonumpy=skimage.io.imread(image)\n#do a mean between all the channels\
nimgtonumpy=np.mean(imgtonumpy,axis=2)\n#normalize\nimgtonumpy=imgtonumpy/255\
n#resize\nimgtonumpy=skimage.transform.resize(imgtonumpy,(50,100))\
nshape=imgtonumpy.shape\nflatten=np.reshape(imgtonumpy,(-1))\ntoascii=\"\"\nfor i
in range(len(flatten)):\n if i%(shape[1])== 0:\n print(shape[1])\n
toascii+=\"\\n\"\n\n\n toascii+=ascilist[int(flatten[i]*(len(ascilist)-1))]\n\
nprint(toascii)\n","metadata":{"colab":{"base_uri":"https://
localhost:8080/"},"executionInfo":
{"elapsed":216,"status":"ok","timestamp":1727975791655,"user":{"displayName":"DAVID
SANTIAGO SANCHEZ
CEPEDA","userId":"09385389865282990454"},"user_tz":300},"id":"SSNg8LVbOL_5","output
Id":"f96c3303-fcf8-477f-f395-8687f1c0ca55","execution":{"iopub.status.busy":"2024-
10-07T19:48:30.113560Z","iopub.status.idle":"2024-10-
07T19:48:30.114129Z","shell.execute_reply.started":"2024-10-
07T19:48:30.113808Z","shell.execute_reply":"2024-10-
07T19:48:30.113836Z"},"trusted":true},"execution_count":null,"outputs":[]},
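{"cell_type":"markdown","source":"The cell above maps each pixel to a character from an arbitrary list. A common variation, sketched below under the assumption that `imgtonumpy` from the previous cell is still in memory, orders the characters roughly from dense to sparse so darker pixels render as heavier glyphs; the `ramp` string and the `to_ascii` helper are illustrative additions, not part of the original notebook.","metadata":{}},{"cell_type":"code","source":"#Alternative ramp ordered roughly dense -> sparse (illustrative)\nramp=\"@%#*+=-:. \"\n\ndef to_ascii(gray,ramp):\n    #gray: 2-D array with values in [0,1]; one output character per pixel\n    lines=[]\n    for row in gray:\n        lines.append(\"\".join(ramp[min(int(v*(len(ramp)-1)),len(ramp)-1)] for v in row))\n    return \"\\n\".join(lines)\n\nprint(to_ascii(imgtonumpy,ramp))","metadata":{},"execution_count":null,"outputs":[]},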
{"cell_type":"markdown","source":"satisfacible\n\n{-(p->( (-pOq ) ->q ) )}\n{p, -(
(-pOq ) ->q ) )}\n{p, -( -(-pOq ) O q ) )}\n{p, (-pOq ) Y -q }\n{p, (-pOq ) , q
}\n{p, -p,q},{","metadata":{"id":"cTi7xYdIJmTN"}},
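{"cell_type":"markdown","source":"The tableau cells in this notebook use O for or, Y for and, - for not and -> for implies. The next cell is a small brute-force truth-table check of two of the sets above; the `satisfiable` helper and the lambdas `f1`/`f2` are illustrative additions, not part of the original derivations.","metadata":{}},{"cell_type":"code","source":"#Brute-force satisfiability check of the tableau examples above (illustrative)\nfrom itertools import product\n\ndef satisfiable(formula,names):\n    #formula: callable on a dict of truth values; names: propositional variables\n    for values in product([False,True],repeat=len(names)):\n        env=dict(zip(names,values))\n        if formula(env):\n            return env        #a satisfying assignment exists -> some branch stays open\n    return None               #unsatisfiable -> every branch closes\n\n#-(p -> ((-p O q) -> q)): the set from the cell above\nf1=lambda v: not ((not v['p']) or ((not ((not v['p']) or v['q'])) or v['q']))\nprint(satisfiable(f1,['p','q']))      #None: both branches close\n\n#(-pOq) Y (r->(sY-t)) Y (-s) Y -(-rYq): the earlier 'imp' example\nf2=lambda v: ((not v['p'] or v['q']) and ((not v['r']) or (v['s'] and not v['t']))\n              and (not v['s']) and not ((not v['r']) and v['q']))\nprint(satisfiable(f2,['p','q','r','s','t']))   #a countermodel: that tableau has an open branch\n","metadata":{},"execution_count":null,"outputs":[]},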
{"cell_type":"markdown","source":"{ (-pOq),( q-> ( (-rY-p) Y (pOr) ) ) }\n{ (-
pOq),( -q O ( (-rY-p) Y (pOr) ) ) }","metadata":{"id":"PnA5wtBxLCGk"}},
{"cell_type":"markdown","source":"pY(-qO-p)\n{p,-qO-p}\n{p,-q} {p,-p}\n .
x","metadata":{"id":"RevKTpyY6pND"}},{"cell_type":"markdown","source":"(pOq)Y(-pO-
q)\n{pOq,-pO-q}\n{p,-pO-q} {q,-pO-q}\n{p,-p} {p,-q} {q,-p} {q,-q}\n x o
o x\n","metadata":{"id":"GTuG2ueN68nE"}},
{"cell_type":"markdown","source":"{pOq,-pY-q}\n{p,-pY-q} {q,-pY-q}\n{p,-p,-q} {q,-
p,-q}\n x x\n","metadata":{"id":"p7ltRXV27yoz"}},
{"cell_type":"markdown","source":"{p->q,q->p}\n{p","metadata":{"id":"K2zKeEwI-
bhl"}},{"cell_type":"code","source":"#test layernorm on vector\
nvector=torch.Tensor([1,2,3])\nvector=torch.reshape(vector,(1,1,-1))\
nlayernorm=torch.nn.LayerNorm((1,3))\nresult=layernorm(vector)\
nprint(result)","metadata":{"colab":{"base_uri":"https://
localhost:8080/"},"executionInfo":
{"elapsed":208,"status":"ok","timestamp":1727536071355,"user":{"displayName":"DAVID
SANTIAGO SANCHEZ
CEPEDA","userId":"09385389865282990454"},"user_tz":300},"id":"QY9Q6seSHajs","output
Id":"757a01df-4ea6-4aa8-aa02-b4a27c2e6738","execution":{"iopub.status.busy":"2024-
10-07T19:48:30.115554Z","iopub.status.idle":"2024-10-
07T19:48:30.116376Z","shell.execute_reply.started":"2024-10-
07T19:48:30.116077Z","shell.execute_reply":"2024-10-
07T19:48:30.116115Z"},"trusted":true},"execution_count":null,"outputs":[]},
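{"cell_type":"markdown","source":"As a sanity check on the cell above: LayerNorm normalizes over the trailing `normalized_shape` dimensions using the biased variance, with `eps` defaulting to 1e-5 and the affine parameters initialized to gamma=1, beta=0. The next cell recomputes this by hand; it is a verification sketch added here, not part of the original notebook.","metadata":{}},{"cell_type":"code","source":"#Manual recomputation of the LayerNorm output above\nimport torch\n\nx=torch.tensor([[[1.,2.,3.]]])                       #shape (1,1,3)\nmean=x.mean(dim=(-2,-1),keepdim=True)                #mean over the normalized dims (1,3)\nvar=x.var(dim=(-2,-1),keepdim=True,unbiased=False)   #biased variance, as LayerNorm uses\nmanual=(x-mean)/torch.sqrt(var+1e-5)                 #eps defaults to 1e-5; gamma=1, beta=0 at init\nprint(manual)\nprint(torch.nn.LayerNorm((1,3))(x))                  #matches `manual` up to floating point error\n","metadata":{},"execution_count":null,"outputs":[]},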
{"cell_type":"markdown","source":"\n","metadata":{"id":"RbykQvuOHbE0"}}]}
