From 8070b813a724661f49d6c6e09466ca5eb830de5f Mon Sep 17 00:00:00 2001
From: Dongdong Tian
Date: Wed, 27 Mar 2024 15:08:54 +0800
Subject: [PATCH] Format docstrings

---
 HinetPy/__init__.py |  4 +--
 HinetPy/client.py   | 63 +++++++++++++++++----------------
 HinetPy/header.py   | 11 ++++---
 HinetPy/utils.py    |  4 +--
 HinetPy/win32.py    | 72 +++++++++++++++++++++------------------
 5 files changed, 72 insertions(+), 82 deletions(-)

diff --git a/HinetPy/__init__.py b/HinetPy/__init__.py
index deaaf5d6..4d5e9da4 100644
--- a/HinetPy/__init__.py
+++ b/HinetPy/__init__.py
@@ -3,8 +3,8 @@
 HinetPy
 =======
 
-HinetPy is a Python package to request and process seismic waveform data
-from the NIED Hi-net website.
+HinetPy is a Python package to request and process seismic waveform data from the NIED
+Hi-net website.
 
 Basis usage:
 
diff --git a/HinetPy/client.py b/HinetPy/client.py
index 2c43d792..dbf32a0c 100644
--- a/HinetPy/client.py
+++ b/HinetPy/client.py
@@ -97,19 +97,16 @@ def __init__(
 
         Notes
         -----
-        The Hi-net server ususally spends 10-60 seconds on data
-        preparation after receiving a data request. During the data
-        preparation, users are **NOT** allowed to post another data request.
-        So users have to wait until the data is ready.
-
-        HinetPy checks data status every ``sleep_time_in_seconds`` seconds for
-        no more than ``max_sleep_count`` times, until the data is ready.
-        If the data status is still NOT ready after
-        ``max_sleep_count * sleep_time_in_seconds`` seconds,
-        it most likely means something goes wrong with the data request.
-        Then, HinetPy will retry to request the data ``retries`` times.
-        Ususally, you don't need to modify these parameters
-        unless you know what you're doing.
+        The Hi-net server usually spends 10-60 seconds on data preparation after
+        receiving a data request. During the data preparation, users are **NOT** allowed
+        to post another data request. So users have to wait until the data is ready.
+
+        HinetPy checks data status every ``sleep_time_in_seconds`` seconds for no more
+        than ``max_sleep_count`` times, until the data is ready. If the data status is
+        still NOT ready after ``max_sleep_count * sleep_time_in_seconds`` seconds, it
+        most likely means something is wrong with the data request. Then, HinetPy will
+        retry to request the data ``retries`` times. Usually, you don't need to modify
+        these parameters unless you know what you're doing.
 
         Examples
         --------
@@ -343,17 +340,16 @@ def get_continuous_waveform(  # noqa: PLR0915, PLR0912
         span: int
             Time span in minutes.
         max_span: int
-            Maximum time span for sub-requests. Defaults to be determined
-            automatically. See notes below.
+            Maximum time span for sub-requests. By default, it is determined automatically.
+            See notes below.
         data: str
             Filename of downloaded win32 data. Default format: CODE_YYYYmmddHHMM_SPAN.cnt
         ctable: str
-            Filename of downloaded channel table file.
-            Default format: CODE_YYYYmmdd.ch
+            Filename of downloaded channel table file. Default format: CODE_YYYYmmdd.ch
         outdir: str
-            Save win32 and channel table data to a specified directory.
-            Default is in the current directory.
+            Save win32 and channel table data to a specified directory. Default is in
+            the current directory.
         threads: int
             Parallel data download using more threads.
         cleanup: bool
@@ -380,11 +376,10 @@ def get_continuous_waveform(  # noqa: PLR0915, PLR0912
         2. Number_of_channels * Record_Length <= 12000 min
         3. Only the latest 150 requested data are kept
 
-        For example, Hi-net network has about 24000 channels. Acoording to
-        limitation 2, the record length should be no more than 5 minutes
-        for each data request. HinetPy "break through" the limitation by
-        splitting a long-duration data request into several short-duration
-        sub-requsts.
+        For example, Hi-net network has about 24000 channels. According to limitation 2,
+        the record length should be no more than 5 minutes for each data request.
+        HinetPy "breaks through" the limitation by splitting a long-duration data request
+        into several short-duration sub-requests.
 
         **How it works**
 
@@ -759,13 +754,11 @@ def get_event_waveform(  # noqa: PLR0913
         longitude: float
             Specify the longitude to be used for a radius search.
         minradius: float
-            Limit to events within the specified minimum number of degrees
-            from the geographic point defined by the latitude and longitude
-            parameters.
+            Limit to events within the specified minimum number of degrees from the
+            geographic point defined by the latitude and longitude parameters.
         maxradius: float
-            Limit to events within the specified maximum number of degrees
-            from the geographic point defined by the latitude and longitude
-            parameters.
+            Limit to events within the specified maximum number of degrees from the
+            geographic point defined by the latitude and longitude parameters.
         """
         starttime, endtime = to_datetime(starttime), to_datetime(endtime)
 
@@ -1075,13 +1068,11 @@ def select_stations(  # noqa: PLR0913
         longitude: float
             Specify the longitude to be used for a radius search.
         minradius: float
-            Limit to stations within the specified minimum number of degrees
-            from the geographic point defined by the latitude and longitude
-            parameters.
+            Limit to stations within the specified minimum number of degrees from the
+            geographic point defined by the latitude and longitude parameters.
         maxradius: float
-            Limit to stations within the specified maximum number of degrees
-            from the geographic point defined by the latitude and longitude
-            parameters.
+            Limit to stations within the specified maximum number of degrees from the
+            geographic point defined by the latitude and longitude parameters.
 
         Examples
         --------
diff --git a/HinetPy/header.py b/HinetPy/header.py
index 3bd751d7..b9e35d36 100644
--- a/HinetPy/header.py
+++ b/HinetPy/header.py
@@ -1,10 +1,13 @@
-"""Basic information of networks."""
+"""
+Basic information of networks.
+"""
 
 from collections import namedtuple
 from datetime import datetime
 
 Network = namedtuple("Network", "name, channels, starttime, url")
-""" An object containing information of a network.
+"""
+An object containing information of a network.
 
 .. py:attribute:: name
 
@@ -22,8 +25,8 @@
 
    Homepage of the network.
 
-``NETWORK`` is a dict of :class:`~HinetPy.header.Network`, containing
-information of all networks available from Hi-net website.
+``NETWORK`` is a dict of :class:`~HinetPy.header.Network`, containing information of all
+networks available from the Hi-net website.
 
 >>> from HinetPy import NETWORK
 >>> for code in NETWORK.keys():
diff --git a/HinetPy/utils.py b/HinetPy/utils.py
index e21e6589..a805acb4 100644
--- a/HinetPy/utils.py
+++ b/HinetPy/utils.py
@@ -102,8 +102,8 @@ def point_inside_box(
 
 def haversine(lat1, lon1, lat2, lon2):
     """
-    Calculate the great circle distance between two points on the earth
-    (specified in decimal degrees) using haversine formula.
+    Calculate the great circle distance between two points on the earth (specified in
+    decimal degrees) using the haversine formula.
 
     Reference: https://stackoverflow.com/a/4913653/7770208.
diff --git a/HinetPy/win32.py b/HinetPy/win32.py
index d0fe1ae1..9f1ace88 100644
--- a/HinetPy/win32.py
+++ b/HinetPy/win32.py
@@ -145,14 +145,14 @@ def extract_sac(
     Extract data as SAC format files.
 
     This function calls the ``win2sac_32`` command, available in the Hi-net win32tools
-    package, to convert data files from win32 format to SAC fomrat. It can also
-    extract the channel information as SAC polezero files.
+    package, to convert data files from win32 format to SAC format. It can also extract
+    the channel information as SAC polezero files.
 
-    Note that the ``win2sac_32`` command always remove the instrument sensitivity
-    from waveform data, and multiply the data by 1.0e9. Thus, the extracted SAC
-    files are not in digital counts, but velocity in nm/s, or acceleration in nm/s/s.
-    Due to the same reason, the extracted SAC polezero files does not keep the
-    sensitivity in the "CONSTANT" of SAC polezero files.
+    Note that the ``win2sac_32`` command always removes the instrument sensitivity from
+    waveform data and multiplies the data by 1.0e9. Thus, the extracted SAC files are not
+    in digital counts, but velocity in nm/s, or acceleration in nm/s/s. For the same
+    reason, the extracted SAC polezero files do not keep the sensitivity in the
+    "CONSTANT" of SAC polezero files.
 
     Parameters
     ----------
@@ -165,23 +165,21 @@ def extract_sac(
     outdir: str
         Output directory. Defaults to current directory.
     pmax: int
-        Maximum number of data points for one channel. Defaults to 8640000.
-        If one channel has more than 8640000 data points (i.e., longer than
-        one day for a 100 Hz sampling rate), you MUST increase ``pmax``.
+        Maximum number of data points for one channel. Defaults to 8640000. If one
+        channel has more than 8640000 data points (i.e., longer than one day for a
+        100 Hz sampling rate), you MUST increase ``pmax``.
     filter_by_id: list or str
         Filter channels by ID. It can be a list of IDs or a wildcard.
     filter_by_name: list or str
         Filter channels by name. It can be a list of names or a wildcard.
     filter_by_component: list or str
-        Filter channels by component. It can be a list of component names or
-        a wildcard.
+        Filter channels by component. It can be a list of component names or a wildcard.
     with_sacpz: bool
-        Aslo extract SAC PZ files. By default, the suffix is ``.SAC_PZ`` and
-        the channel sensitivity is not kept in the "CONSTANT".
+        Also extract SAC PZ files. By default, the suffix is ``.SAC_PZ`` and the channel
+        sensitivity is not kept in the "CONSTANT".
     processes: None or int
-        Number of processes to speed up data extraction parallelly.
-        ``None`` means using all CPUs.
-
+        Number of processes to speed up data extraction in parallel. ``None`` means using
+        all CPUs.
 
     .. deprecated:: 0.7.0
 
@@ -278,19 +276,18 @@ def extract_sacpz(
     outdir: str
         Output directory. Defaults to current directory.
     keep_sensitivity: bool
-        The ``win2sac_32`` program automatically removes sensitivity from waveform
-        data during the win32-to-SAC format conversion.
-        So the generated polezero file should omit the sensitivity.
+        The ``win2sac_32`` program automatically removes sensitivity from waveform data
+        during the win32-to-SAC format conversion. So the generated polezero file should
+        omit the sensitivity.
     filter_by_id: list or str
         Filter channels by ID. It can be a list of IDs or a wildcard.
     filter_by_name: list or str
        Filter channels by name. It can be a list of names or a wildcard.
     filter_by_component: list or str
-        Filter channels by component. It can be a list of component names or
-        a wildcard.
+        Filter channels by component. It can be a list of component names or a wildcard.
     processes: None or int
-        Number of processes to speed up data extraction parallelly.
-        ``None`` means using all CPUs.
+        Number of processes to speed up data extraction in parallel. ``None`` means using
+        all CPUs.
 
     Examples
     --------
@@ -405,8 +402,7 @@ def _filter_channels(
     filter_by_name: list or str
         Filter channels by name. It can be a list of names or a wildcard.
     filter_by_component: list or str
-        Filter channels by component. It can be a list of component names or
-        a wildcard.
+        Filter channels by component. It can be a list of component names or a wildcard.
     """
 
     def _filter(channels, key, filters):
@@ -546,13 +542,13 @@ def merge(data, total_data, force_sort=False):
     The function calls the ``catwin32`` command, available in the Hi-net win32tools
     package, to merge multiple win32 files into one large win32 file.
 
-    By default, the ``catwin32`` command simply concatenates all files in the order
-    they are passed. So the files must be sorted by their start time before being
-    passed. If your files are named by starttime like ``201304040203.cnt``, you can use
+    By default, the ``catwin32`` command simply concatenates all files in the order they
+    are passed. So the files must be sorted by their start time before being passed. If
+    your files are named by starttime like ``201304040203.cnt``, you can use
     ``data=sorted(glob.glob("20130404*.cnt"))`` to pass the sorted list of files.
-    Otherwise, you have to use ``force_sort=True``, forcing ``catwin32`` to sort
-    all files by starttime before merging. However, the sorting process is very
-    time consuming. Do NOT set ``force_sort=True`` unless necessary.
+    Otherwise, you have to use ``force_sort=True``, forcing ``catwin32`` to sort all
+    files by starttime before merging. However, the sorting process is very time
+    consuming. Do NOT set ``force_sort=True`` unless necessary.
 
     Parameters
     ----------
@@ -565,20 +561,20 @@ def merge(data, total_data, force_sort=False):
 
     Examples
     --------
-    For win32 files that are named by starttime (e.g. ``201304040203.cnt``),
-    sorting win32 files using Python's built-in :func:`sorted` function is preferred:
+    For win32 files that are named by starttime (e.g. ``201304040203.cnt``), sorting
+    win32 files using Python's built-in :func:`sorted` function is preferred:
 
     >>> data = sorted(glob.glob("20130404*.cnt"))
     >>> merge(data, "outdir/final.cnt")
 
-    If win32 files are randomly named, you should use ``force_sort=True`` to
-    force ``catwin32`` to sort all data by time before merging.
+    If win32 files are randomly named, you should use ``force_sort=True`` to force
+    ``catwin32`` to sort all data by time before merging.
 
     >>> data = ["001.cnt", "002.cnt", "003.cnt"]
     >>> merge(data, "final.cnt", force_sort=True)
 
-    You can also use wildcard to specify the win32 files to be merged.
-    The function will sort the matched files for you automatically.
+    You can also use wildcard to specify the win32 files to be merged. The function will
+    sort the matched files for you automatically.
 
     >>> merge("20130404*.cnt", "final.cnt")
     """
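
For reference, a minimal sketch of the request-then-convert workflow these docstrings
describe. The credentials, network code, and start time below are placeholders, and the
Client constructor and the positional arguments of get_continuous_waveform are assumed
from HinetPy's public API rather than shown in this diff:

    from HinetPy import Client, win32

    # Log in to the Hi-net website (placeholder credentials).
    client = Client("username", "password")

    # Request 20 minutes of continuous waveforms from the Hi-net network ("0101");
    # returns the filenames of the win32 data file and the channel table file.
    data, ctable = client.get_continuous_waveform("0101", "2010-01-01 00:00", 20)

    # Convert the win32 data to SAC files and also extract SAC polezero files,
    # as documented in the HinetPy/win32.py docstrings above.
    win32.extract_sac(data, ctable, with_sacpz=True)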