Open bdigirolamo opened 3 weeks ago
Hi, with a data rate limit at one million bytes per second, I think that the issue is not the data rate itself (i.e. I'm skeptical of the error message). Can you provide me with the notebook or a way to reproduce the error?
Hello, thanks for the prompt answer. Could it be also related to my hardware? I have a 2020 iPad 256 GB. Here you find the notebook, while for the data you can download it at: https://cernbox.cern.ch/s/sCMJL2bcuBL7m0n. The notebook is not complete with the plotting part, but that’s not an issue. Thanks for your help. Beniamino. Sent from my iPad. On 5 Jun 2024, at 11:49, Nicolas Holzschuch @.***> wrote: Hi, with a data rate limit at one million bytes per second, I think that the issue is not the data rate itself (i.e. I'm skeptical of the error message). Can you provide me with the notebook or a way to reproduce the error?
—Reply to this email directly, view it on GitHub, or unsubscribe.You are receiving this because you authored the thread.Message ID: @.***>
I'm afraid the notebook does not appear here (github removes the pieces attached to e-mail) and I cannot download the data using the link provided either.
All right.
I paste here the code (I couldn’t export it other than as cryptic LaTeX, so I opened it in emacs). I also add below a few lines of the data file, which has the following name: PLI01_2024.06.04-RAW.txt
I would have liked to pass you the full file because it is a 2582546 lines file, but if you can’t download I could do better.
Thanks
Beniamino
—————Start of code—————
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import sys"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt\n",
"import matplotlib.gridspec as gridspec\n",
"import matplotlib.dates as mdates"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"import time\n",
"import datetime,pytz\n",
"import calendar\n",
"from datetime import datetime\n",
"\n",
"import numpy as np\n",
"import scipy.signal\n",
"\n",
"import scipy.fftpack as fftpack\n",
"# import pywt\n"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"# matrix initialise\n",
"global date\n",
"global xutc\n",
"global secos\n",
"global inc1\n",
"global inc2\n",
"global ref1\n",
"global ref2\n",
"global ertimesec\n",
"date = []\n",
"secos = []\n",
"inc1 = []\n",
"inc2 = []\n",
"ref1 = []\n",
"ref2 = []\n",
"xutc=[]\n",
"ertimesec =[]"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"data = \"2024.06.04\"\n",
"#b = filepli.split(\"-\")\n",
"c = data.split(\".\")\n",
"dateq = c[0] + \"-\" + c[1] + \"-\" + c[2]\n",
"#print(dateq)\n",
"\n",
"pli = \"PLI01\"\n",
"\n",
"k1 = 350\n",
"k2 = 426\n",
"\n",
"h1 = \"00:00:00.000\"\n",
"# h1 = \"02:20:00.000\"\n",
"h2 = \"02:00:00.000\"\n",
"#h2 = \"23:59:59.999\" \n",
"#fftflag = \"fft\"\n",
"# fftflag = \"no\"\n",
"\n",
"t1 = dateq + \" \" + h1\n",
"t2 = dateq + \" \" + h2\n",
"\n",
"d = data\n"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"accessing file PLI012024.06.04-RAW.txt\n"
]
}
],
"source": [
"filepli = pli + \"\" + data + \"-RAW.txt\"\n",
"print(\"accessing file \", filepli)"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"if (pli==\"PLI01\" or pli==\"PLI02\" or pli==\"PLI04\" or pli==\"PLI03\" or pli==\"PLI06\" or pli==\"PLI07\"):\n",
" with open(filepli,\"r\") as quake:\n",
" data = quake.readlines()[:-1]\n",
" quake.close()\n",
" for i, linea in enumerate(data):\n",
" column = linea.split(' ')\n",
" seco = float(column[1][:-3])\n",
" millis = float(column[1][len(column[1])-3:])/1000.\n",
" secop = seco + millis\n",
" secos.append(secop)\n",
" xutc.append(secop)\n",
" wl1 = float(column[2])\n",
" wl2 = float(column[3])\n",
" wl3 = float(column[4])\n",
" wl4 = float(column[5])\n",
" rp1 = float(column[6])\n",
" rp2 = float(column[7])\n",
" rp3 = float(column[8])\n",
"            rp4 = float(column[9].rstrip(\"\\n\"))\n",
" if (column[5] != 'NaN'):\n",
"                inc2cont = float(k2)*((wl1+wl4-wl2-wl3)-(rp1+rp4-rp2-rp3))\n",
"                ref2cont = float(k2)*(rp1+rp4-rp2-rp3)\n",
" inc2.append(inc2cont)\n",
" ref2.append(ref2cont)\n",
" else:\n",
" inc2.append(0)\n",
" ref2.append(0)\n",
" \n",
" if (column[9] != 'NaN'):\n",
"                inc1cont = float(k1)*((wl3+wl4-wl1-wl2)-(rp3+rp4-rp1-rp2))\n",
"                ref1cont = float(k1)*(rp3+rp4-rp1-rp2)\n",
" inc1.append(inc1cont)\n",
" ref1.append(ref1cont)\n",
" else:\n",
" inc1.append(0)\n",
" ref1.append(0)"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"max Inc1 signal 14.281050000000004\n",
"max Inc2 signal 42.921554598\n"
]
}
],
"source": [
"if (len(inc1)>0):\n",
" meaninc1 = np.mean(inc1)\n",
" print(\"max Inc1 signal\",np.max(np.absolute(inc1)))\n",
"else:\n",
" meaninc1 = 0\n",
"\n",
" \n",
"if (len(inc2)>0):\n",
" meaninc2 = np.mean(inc2)\n",
" print(\"max Inc2 signal\",np.max(np.absolute(inc2)))\n",
"else:\n",
" meaninc2 = 0\n",
"\n",
"\n",
"\n",
" \n",
"minc1=inc1-meaninc1\n",
"minc2=inc2-meaninc2\n",
"\n",
"set1 = {'Inc1':(xutc,minc1),'Inc2':(xutc,minc2),'Ref1':(xutc,ref1),'Ref2':(xutc,ref2)}"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"# function definitions\n",
"\n",
"# def dwt_filter(signal, thresh = 0.3, wavelet=\"db5\"):\n",
"#     thresh = thresh*np.nanmax(signal)\n",
"\n",
"# coeff = pywt.wavedec(signal, wavelet, level=10)\n",
"# coeff[1:] = (pywt.threshold(i, value=thresh, mode=\"soft\" ) for i in coeff[1:])\n",
" \n",
"# reconstructed_signal = pywt.waverec(coeff, wavelet)\n",
"# # return reconstructed_signal\n",
"# # dwt always produces an output list of even size. It may have to be truncated by one to \n",
"# # match legnth of initial signal list\n",
"# return reconstructed_signal[:-1] if len(signal) % 2 else reconstructed_signal\n",
"\n",
"\n",
"def butter_bandpass(lowcut, highcut, fs, order=4):\n",
"    nyq = 0.5 * fs\n",
" low = lowcut / nyq\n",
" high = highcut / nyq\n",
" sos = scipy.signal.butter(order, [low, high], btype='bandpass', output='sos')\n",
" return sos\n",
"\n",
"\n",
"def butter_bandpass_filter(data, lowcut, highcut, fs, new_order=10):\n",
" sos = butter_bandpass(lowcut, highcut, fs, order = new_order)\n",
" y = scipy.signal.sosfilt(sos, data)\n",
" return y\n",
"\n",
"\n",
"\n",
"def move_figure(position=\"top-right\"):\n",
" '''\n",
" Move and resize a window to a set of standard positions on the screen.\n",
" Possible positions are:\n",
" top, bottom, left, right, top-left, top-right, bottom-left, bottom-right\n",
" '''\n",
"\n",
" mgr = plt.get_current_fig_manager()\n",
" #mgr.full_screen_toggle() # primitive but works to get screen size\n",
"    py = 2*mgr.canvas.height()\n",
"    px = 2*mgr.canvas.width()\n",
" print(px,py)\n",
"\n",
" d = 10 # width of the window border in pixels\n",
" if position == \"top\":\n",
" # x-top-left-corner, y-top-left-corner, x-width, y-width (in pixels)\n",
"        mgr.window.setGeometry(d, 4*d, px - 2*d, py/2 - 4*d)\n",
"    elif position == \"bottom\":\n",
"        mgr.window.setGeometry(d, py/2 + 5*d, px - 2*d, py/2 - 4*d)\n",
"    elif position == \"left\":\n",
"        mgr.window.setGeometry(d, 4*d, px/2 - 2*d, py - 4*d)\n",
"    elif position == \"right\":\n",
"        mgr.window.setGeometry(px/2 + d, 4*d, px/2 - 2*d, py - 4*d)\n",
"    elif position == \"top-left\":\n",
"        mgr.window.setGeometry(d, 4*d, px/2 - 2*d, py/2 - 4*d)\n",
"    elif position == \"top-right\":\n",
"        mgr.window.setGeometry(px/2 + d, 4*d, px/2 - 2*d, py/2 - 4*d)\n",
"    elif position == \"bottom-left\":\n",
"        mgr.window.setGeometry(d, py/2 + 5*d, px/2 - 2*d, py/2 - 4*d)\n",
"    elif position == \"bottom-right\":\n",
"        mgr.window.setGeometry(px/2 + d, py/2 + 5*d, px/2 - 2*d, py/2 - 4*d)\n",
" \n"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"def fillvarpli(pli,ta,tb):\n",
" print(\"processing...\")\n",
" print(\"between \", ta, \" and \", tb)\n",
"\n",
" #year = t1.split(\"-\")\n",
"\n",
" ## UTC time\n",
" dtobj1= datetime.strptime(ta, \"%Y-%m-%d %H:%M:%S.%f\")\n",
" dtobj2= datetime.strptime(tb, \"%Y-%m-%d %H:%M:%S.%f\")\n",
"\n",
" dtobj11=dtobj1.replace(tzinfo=pytz.UTC) #replace method\n",
" dtobj22=dtobj2.replace(tzinfo=pytz.UTC) #replace method\n",
"\n",
" # Convert to local time to fetch from TIMBER\n",
" dtobj_1=dtobj11.astimezone(pytz.timezone(\"Europe/Vienna\")) #astimezone method\n",
" dtobj_2=dtobj22.astimezone(pytz.timezone(\"Europe/Vienna\")) #astimezone method\n",
"\n",
" # t1 and t2 in local time as strings\n",
" tta = dtobj_1.strftime(\"%Y-%m-%d %H:%M:%S.%f\")\n",
" ttb = dtobj_2.strftime(\"%Y-%m-%d %H:%M:%S.%f\")\n",
"\n",
" # difference in seconds between time local and time UTC\n",
" seco = float(calendar.timegm(time.strptime(tta, \"%Y-%m-%d %H:%M:%S.%f\"))-calendar.timegm(time.strptime(t1, \"%Y-%m-%d %H:%M:%S.%f\")))\n",
" print(\"seco =\",seco)\n",
"\n",
" t1b = float(calendar.timegm(time.strptime(ta, \"%Y-%m-%d %H:%M:%S.%f\")))\n",
" t2b = float(calendar.timegm(time.strptime(tb, \"%Y-%m-%d %H:%M:%S.%f\")))\n",
"\n",
"\n",
" inc1='Inc1'\n",
" inc2='Inc2'\n",
" ref1='Ref1'\n",
" ref2='Ref2'\n",
"\n",
"\n",
" tt0,vv0=set1['Inc1']\n",
" tt1,vv1=set1['Inc2']\n",
"\n",
" tt01,ref1=set1['Ref1']\n",
" tt02,ref2=set1['Ref2']\n",
"\n",
" print(\"tto = \",tt0)\n",
"\n",
" # tt0 = tt0 - seco\n",
" # tt1 = tt1 - seco\n",
"\n",
" return tt0,vv0,ref1,tt1,vv1,ref2"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"IOPub data rate exceeded.\n",
"The Jupyter server will temporarily stop sending output\n",
"to the client in order to avoid crashing it.\n",
"To change this limit, set the config variable\n",
"`--ServerApp.iopub_data_rate_limit`.\n",
"\n",
"Current values:\n",
"ServerApp.iopub_data_rate_limit=1000000.0 (bytes/sec)\n",
"ServerApp.rate_limit_window=3.0 (secs)\n",
"\n"
]
}
],
"source": [
"# Execution\n",
"tt0_01,vv0_01,ref1_01,tt1_01,vv1_01,ref2_01 = fillvarpli(pli,t1,t2)\n",
"\n",
"\n",
"first = 0\n",
"second = int(len(tt0_01) - 1)\n",
"\n",
"primo = tt0_01[0]\n",
"secondo = tt0_01[len(tt0_01)-1]\n",
"\n",
"majtk = len(vv0_01)/10/6\n",
"mintk = majtk/4\n",
"\n",
"major_ticks = np.arange(tt0_01[first], tt0_01[second], majtk)\n",
"minor_ticks = np.arange(tt0_01[first], tt0_01[second], mintk) "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.0"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
————END of code-----------
00:00:00.000 1717459200.000 0.078724480 0.039318143 0.048483948 0.131284533 0.063933287 0.052287658 0.061287437 0.103761961 00:00:00.033 1717459200.033 0.078592297 0.039438621 0.048571069 0.131187080 0.063932632 0.052287517 0.061286125 0.103762107 00:00:00.067 1717459200.067 0.078577040 0.039366136 0.048518060 0.131360571 0.063931897 0.052286136 0.061284330 0.103762236 00:00:00.100 1717459200.100 0.078572795 0.039294372 0.048501322 0.131458011 0.063930457 0.052286859 0.061287075 0.103763279 00:00:00.133 1717459200.133 0.079023172 0.039137271 0.048292120 0.131403456 0.063932890 0.052287624 0.061284528 0.103763038 00:00:00.167 1717459200.167 0.078505375 0.039542403 0.048597037 0.131128533 0.063934370 0.052288990 0.061287683 0.103763598 00:00:00.200 1717459200.200 0.078621849 0.039386922 0.048629364 0.131129838 0.063929251 0.052286659 0.061284553 0.103763449 00:00:00.233 1717459200.233 0.078781380 0.039234843 0.048413777 0.131401889 0.063928355 0.052285795 0.061285160 0.103764432 00:00:00.267 1717459200.267 0.078736263 0.039367673 0.048420775 0.131292070 0.063930897 0.052286868 0.061285622 0.103762605 00:00:00.300 1717459200.300 0.078614485 0.039462817 0.048522202 0.131202572 0.063928480 0.052287767 0.061284340 0.103763561
On 5 Jun 2024, at 13:54, Nicolas Holzschuch @.***> wrote:
I'm afraid the notebook does not appear here (github removes the pieces attached to e-mail) and I cannot download the data using the link provided either.
— Reply to this email directly, view it on GitHub https://github.com/holzschu/Carnets/issues/328#issuecomment-2149647238, or unsubscribe https://github.com/notifications/unsubscribe-auth/BI6VT52AXAQNSWPEV2N2HGLZF34BBAVCNFSM6AAAAABI2JVZAOVHI2DSMVQWIX3LMV43OSLTON2WKQ3PNVWWK3TUHMZDCNBZGY2DOMRTHA. You are receiving this because you authored the thread.
It is connected to the size of the data file, I sorted it out. I simplified the code, now I have to figure out how to select a subset of data, thing that I was doing with the dictionary reading from an external database, but I have to rethink. Thanks anyway
Hello,
Great app. I have an issue on my iPad as follows:
IOPub data rate exceeded. …. To change this limit, set the config variable
--ServerApp.iopub_data_rate_limit
Current values: ServerApp.iopub_data_rate_limit = 1000000.0 (bytes/sec) ServerApp.rate_limit_window=3.0s (secs)
How can I set this variable? thanks