Commit

update
jagar2 committed Apr 18, 2023
1 parent e5433f1 commit 6ea542b
Showing 3 changed files with 66 additions and 35 deletions.
73 changes: 49 additions & 24 deletions m3_learning/papers/2023_Rapid_Fitting/1_SHO_Fitting.ipynb
@@ -18,7 +18,7 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 5,
"id": "f5c36aed",
"metadata": {},
"outputs": [],
@@ -30,7 +30,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 6,
"id": "a6feb316",
"metadata": {},
"outputs": [
@@ -39,25 +39,7 @@
"output_type": "stream",
"text": [
"The autoreload extension is already loaded. To reload it, use:\n",
" %reload_ext autoreload\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-04-18 15:23:37.996668: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: AVX2 FMA\n",
"To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.\n",
"2023-04-18 15:23:38.111353: E tensorflow/stream_executor/cuda/cuda_blas.cc:2981] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n",
"2023-04-18 15:23:38.558393: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libnvinfer.so.7'; dlerror: libnvinfer.so.7: cannot open shared object file: No such file or directory; LD_LIBRARY_PATH: /home/ferroelectric/anaconda3/envs/rapidfitting/lib/python3.9/site-packages/cv2/../../lib64:\n",
"2023-04-18 15:23:38.558487: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libnvinfer_plugin.so.7'; dlerror: libnvinfer_plugin.so.7: cannot open shared object file: No such file or directory; LD_LIBRARY_PATH: /home/ferroelectric/anaconda3/envs/rapidfitting/lib/python3.9/site-packages/cv2/../../lib64:\n",
"2023-04-18 15:23:38.558496: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Cannot dlopen some TensorRT libraries. If you would like to use Nvidia GPU with TensorRT, please make sure the missing libraries mentioned above are installed properly.\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" %reload_ext autoreload\n",
"printing set for seaborn\n",
"Pytorch seed was set to 42\n",
"Numpy seed was set to 42\n",
@@ -70,6 +52,8 @@
"# pip install m3_learning\n",
"\n",
"%load_ext autoreload\n",
"%autoreload 2\n",
"\n",
"import numpy as np\n",
"\n",
"from m3_learning.nn.random import random_seed\n",
@@ -101,7 +85,28 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 9,
"id": "bf67e323",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'/home/ferroelectric/m3_learning/m3_learning/papers/2023_Rapid_Fitting'"
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"pwd"
]
},
{
"cell_type": "code",
"execution_count": 16,
"id": "e0f238cd",
"metadata": {
"colab": {
@@ -116,7 +121,27 @@
"name": "stdout",
"output_type": "stream",
"text": [
"Using files already downloaded\n"
"downloading data\n",
"...2%, 40 MB, 801 KB/s, 51 seconds passedd"
]
},
{
"ename": "KeyboardInterrupt",
"evalue": "",
"output_type": "error",
"traceback": [
"---------------------------------------------------------------------------",
"KeyboardInterrupt                        Traceback (most recent call last)",
"Cell In[16], line 9: download_and_unzip(filename, url, save_path)",
"File ~/m3_learning/m3_learning/src/m3_learning/util/file_IO.py:121, in download_and_unzip(filename, url, save_path, force): download_file(url, path)",
"File ~/m3_learning/m3_learning/src/m3_learning/util/file_IO.py:60, in download_file(url, filename): urllib.request.urlretrieve(url, filename, reporthook)",
"File ~/anaconda3/envs/rapidfitting/lib/python3.9/urllib/request.py:268, in urlretrieve(url, filename, reporthook, data): block = fp.read(bs)",
"File ~/anaconda3/envs/rapidfitting/lib/python3.9/http/client.py:458, in HTTPResponse.read(self, amt): n = self.readinto(b)",
"File ~/anaconda3/envs/rapidfitting/lib/python3.9/http/client.py:502, in HTTPResponse.readinto(self, b): n = self.fp.readinto(b)",
"File ~/anaconda3/envs/rapidfitting/lib/python3.9/socket.py:704, in SocketIO.readinto(self, b): return self._sock.recv_into(b)",
"File ~/anaconda3/envs/rapidfitting/lib/python3.9/ssl.py:1241, in SSLSocket.recv_into(self, buffer, nbytes, flags): return self.read(nbytes, buffer)",
"File ~/anaconda3/envs/rapidfitting/lib/python3.9/ssl.py:1099, in SSLSocket.read(self, len, buffer): return self._sslobj.read(len, buffer)",
"KeyboardInterrupt: "
]
}
],
@@ -126,7 +151,7 @@
"\n",
"# Specify the filename and the path to save the file\n",
"filename = 'data_raw.h5'\n",
"save_path = './'\n",
"save_path = './../../../Data/2023_rapid_fitting'\n",
"\n",
"# download the file\n",
"download_and_unzip(filename, url, save_path)"
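For context, the download cell above appears as raw notebook JSON; as plain Python the updated cell is roughly the sketch below. The dataset URL sits in the collapsed portion of the diff, so the one used here is a placeholder, and the import path assumes the package layout under m3_learning/src.

# Minimal sketch of the updated download cell (placeholder URL)
from m3_learning.util.file_IO import download_and_unzip

url = 'https://example.com/data_raw.h5'  # placeholder; the real URL is in the collapsed hunk

# Specify the filename and the path to save the file
filename = 'data_raw.h5'
save_path = './../../../Data/2023_rapid_fitting'

# Download the file (and unzip it if the filename ends in .zip)
download_and_unzip(filename, url, save_path)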
Binary file not shown.
28 changes: 17 additions & 11 deletions m3_learning/src/m3_learning/util/file_IO.py
@@ -29,6 +29,7 @@ def make_folder(folder, **kwargs):
@author: Joshua C. Agar
"""


def reporthook(count, block_size, total_size):
"""
A function that displays the status and speed of the download
@@ -54,7 +55,7 @@ def download_file(url, filename):
Args:
url (string): url where the file to download is located
filename (string): location where to save the file
"""
"""
if not os.path.isfile(filename):
urllib.request.urlretrieve(url, filename, reporthook)

@@ -66,21 +67,23 @@ def compress_folder(base_name, format, root_dir=None):
base_name (string): base name of the zip file
format (string): sets the format of the zip file. Can either be zip or tar
root_dir (string, optional): sets the root directory to save the file. Defaults to None.
"""
"""
shutil.make_archive(base_name, format, root_dir)


def unzip(filename, path):
"""Function that unzips the files
Args:
filename (string): base name of the zip file
path (string): directory where the contents of the zip file will be extracted
"""
"""
zip_ref = zipfile.ZipFile('./' + filename, 'r')
zip_ref.extractall(path)
zip_ref.close()


def get_size(start_path='.'):
"""A function that computes the size of a folder
@@ -90,31 +93,34 @@ def get_size(start_path='.'):
Returns:
float: Size of the folder
"""
"""
total_size = 0
for dirpath, dirnames, filenames in os.walk(start_path):
for f in filenames:
fp = os.path.join(dirpath, f)
total_size += os.path.getsize(fp)
return total_size

def download_and_unzip(filename, url, save_path, force = False):

def download_and_unzip(filename, url, save_path, force=False):
"""Function that downloads a data file and unzips it if it is an archive
Args:
filename (str): filename to save the zip file
url (str): url where the file is located
save_path (str): place where the data is saved
force (bool, optional): if True, re-download the data even if it already exists. Defaults to False.
"""
"""

path = save_path + '/' + filename
# if np.int(get_size(save_path) / 1e9) < 1:
if exists(save_path + filename) and not force:
if exists(path) and not force:
print('Using files already downloaded')
else:
print('downloading data')
download_file(url, filename)
download_file(url, path)

if '.zip' in filename:
if os.path.isfile(filename):
print(f'extracting {filename} to {save_path}')
unzip(filename, save_path)
if os.path.isfile(path):
print(f'extracting {path}')
unzip(path, save_path)
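
The substance of the change above is that download_and_unzip now joins save_path and filename into a single path and downloads, checks, and extracts against that path instead of the bare filename. A minimal, self-contained sketch of the same idea, using os.path.join rather than string concatenation — not the repository's exact code, and the helper name fetch_and_extract is made up for illustration:

import os
import urllib.request
import zipfile


def fetch_and_extract(filename, url, save_path, force=False):
    """Download url to save_path/filename and extract it if it is a zip archive."""
    os.makedirs(save_path, exist_ok=True)      # make sure the target folder exists
    path = os.path.join(save_path, filename)   # single source of truth for the file location

    if os.path.isfile(path) and not force:
        print('Using files already downloaded')
    else:
        print('downloading data')
        urllib.request.urlretrieve(url, path)  # save under save_path, not the working directory

    if filename.endswith('.zip') and os.path.isfile(path):
        print(f'extracting {path}')
        with zipfile.ZipFile(path, 'r') as zip_ref:
            zip_ref.extractall(save_path)

Using os.path.join avoids the doubled or missing separators that manual concatenation can introduce, and the context manager closes the archive even if extraction fails.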
