Use submodule for Gaia EDR3 tool
parent
53439a5cfd
commit
c85e4550a7
|
@ -7,3 +7,6 @@
|
|||
[submodule "thirdparty/fmt"]
|
||||
path = thirdparty/fmt
|
||||
url = https://github.com/fmtlib/fmt.git
|
||||
[submodule "src/tools/celestia-gaia-stardb"]
|
||||
path = src/tools/celestia-gaia-stardb
|
||||
url = https://github.com/ajtribick/celestia-gaia-stardb.git
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
Subproject commit d1d29c0cc133ee428ae8ae962aceb1c29b36bf5a
|
|
@ -1,590 +0,0 @@
|
|||
[MASTER]
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code.
|
||||
extension-pkg-whitelist=
|
||||
|
||||
# Add files or directories to the blacklist. They should be base names, not
|
||||
# paths.
|
||||
ignore=CVS
|
||||
|
||||
# Add files or directories matching the regex patterns to the blacklist. The
|
||||
# regex matches against base names, not paths.
|
||||
ignore-patterns=
|
||||
|
||||
# Python code to execute, usually for sys.path manipulation such as
|
||||
# pygtk.require().
|
||||
#init-hook=
|
||||
|
||||
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
|
||||
# number of processors available to use.
|
||||
jobs=1
|
||||
|
||||
# Control the amount of potential inferred values when inferring a single
|
||||
# object. This can help the performance when dealing with large functions or
|
||||
# complex, nested conditions.
|
||||
limit-inference-results=100
|
||||
|
||||
# List of plugins (as comma separated values of python module names) to load,
|
||||
# usually to register additional checkers.
|
||||
load-plugins=
|
||||
|
||||
# Pickle collected data for later comparisons.
|
||||
persistent=yes
|
||||
|
||||
# Specify a configuration file.
|
||||
#rcfile=
|
||||
|
||||
# When enabled, pylint would attempt to guess common misconfiguration and emit
|
||||
# user-friendly hints instead of false-positive error messages.
|
||||
suggestion-mode=yes
|
||||
|
||||
# Allow loading of arbitrary C extensions. Extensions are imported into the
|
||||
# active Python interpreter and may run arbitrary code.
|
||||
unsafe-load-any-extension=no
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Only show warnings with the listed confidence levels. Leave empty to show
|
||||
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
|
||||
confidence=
|
||||
|
||||
# Disable the message, report, category or checker with the given id(s). You
|
||||
# can either give multiple identifiers separated by comma (,) or put this
|
||||
# option multiple times (only on the command line, not in the configuration
|
||||
# file where it should appear only once). You can also use "--disable=all" to
|
||||
# disable everything first and then reenable specific checks. For example, if
|
||||
# you want to run only the similarities checker, you can use "--disable=all
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use "--disable=all --enable=classes
|
||||
# --disable=W".
|
||||
disable=print-statement,
|
||||
parameter-unpacking,
|
||||
unpacking-in-except,
|
||||
old-raise-syntax,
|
||||
backtick,
|
||||
long-suffix,
|
||||
old-ne-operator,
|
||||
old-octal-literal,
|
||||
import-star-module-level,
|
||||
non-ascii-bytes-literal,
|
||||
raw-checker-failed,
|
||||
bad-inline-option,
|
||||
locally-disabled,
|
||||
file-ignored,
|
||||
suppressed-message,
|
||||
useless-suppression,
|
||||
deprecated-pragma,
|
||||
use-symbolic-message-instead,
|
||||
apply-builtin,
|
||||
basestring-builtin,
|
||||
buffer-builtin,
|
||||
cmp-builtin,
|
||||
coerce-builtin,
|
||||
execfile-builtin,
|
||||
file-builtin,
|
||||
long-builtin,
|
||||
raw_input-builtin,
|
||||
reduce-builtin,
|
||||
standarderror-builtin,
|
||||
unicode-builtin,
|
||||
xrange-builtin,
|
||||
coerce-method,
|
||||
delslice-method,
|
||||
getslice-method,
|
||||
setslice-method,
|
||||
no-absolute-import,
|
||||
old-division,
|
||||
dict-iter-method,
|
||||
dict-view-method,
|
||||
next-method-called,
|
||||
metaclass-assignment,
|
||||
indexing-exception,
|
||||
raising-string,
|
||||
reload-builtin,
|
||||
oct-method,
|
||||
hex-method,
|
||||
nonzero-method,
|
||||
cmp-method,
|
||||
input-builtin,
|
||||
round-builtin,
|
||||
intern-builtin,
|
||||
unichr-builtin,
|
||||
map-builtin-not-iterating,
|
||||
zip-builtin-not-iterating,
|
||||
range-builtin-not-iterating,
|
||||
filter-builtin-not-iterating,
|
||||
using-cmp-argument,
|
||||
eq-without-hash,
|
||||
div-method,
|
||||
idiv-method,
|
||||
rdiv-method,
|
||||
exception-message-attribute,
|
||||
invalid-str-codec,
|
||||
sys-max-int,
|
||||
bad-python3-import,
|
||||
deprecated-string-function,
|
||||
deprecated-str-translate-call,
|
||||
deprecated-itertools-function,
|
||||
deprecated-types-field,
|
||||
next-method-defined,
|
||||
dict-items-not-iterating,
|
||||
dict-keys-not-iterating,
|
||||
dict-values-not-iterating,
|
||||
deprecated-operator-function,
|
||||
deprecated-urllib-function,
|
||||
xreadlines-attribute,
|
||||
deprecated-sys-function,
|
||||
exception-escape,
|
||||
comprehension-escape
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
# multiple time (only on the command line, not in the configuration file where
|
||||
# it should appear only once). See also the "--disable" option for examples.
|
||||
enable=c-extension-no-member
|
||||
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# Python expression which should return a score less than or equal to 10. You
|
||||
# have access to the variables 'error', 'warning', 'refactor', and 'convention'
|
||||
# which contain the number of messages in each category, as well as 'statement'
|
||||
# which is the total number of statements analyzed. This score is used by the
|
||||
# global evaluation report (RP0004).
|
||||
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
|
||||
|
||||
# Template used to display messages. This is a python new-style format string
|
||||
# used to format the message information. See doc for all details.
|
||||
#msg-template=
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, json
|
||||
# and msvs (visual studio). You can also give a reporter class, e.g.
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
output-format=text
|
||||
|
||||
# Tells whether to display a full report or only the messages.
|
||||
reports=no
|
||||
|
||||
# Activate the evaluation score.
|
||||
score=yes
|
||||
|
||||
|
||||
[REFACTORING]
|
||||
|
||||
# Maximum number of nested blocks for function / method body
|
||||
max-nested-blocks=5
|
||||
|
||||
# Complete name of functions that never returns. When checking for
|
||||
# inconsistent-return-statements if a never returning function is called then
|
||||
# it will be considered as an explicit return statement and no message will be
|
||||
# printed.
|
||||
never-returning-functions=sys.exit
|
||||
|
||||
|
||||
[SIMILARITIES]
|
||||
|
||||
# Ignore comments when computing similarities.
|
||||
ignore-comments=yes
|
||||
|
||||
# Ignore docstrings when computing similarities.
|
||||
ignore-docstrings=yes
|
||||
|
||||
# Ignore imports when computing similarities.
|
||||
ignore-imports=no
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
min-similarity-lines=4
|
||||
|
||||
|
||||
[STRING]
|
||||
|
||||
# This flag controls whether the implicit-str-concat-in-sequence should
|
||||
# generate a warning on implicit string concatenation in sequences defined over
|
||||
# several lines.
|
||||
check-str-concat-over-line-jumps=no
|
||||
|
||||
|
||||
[VARIABLES]
|
||||
|
||||
# List of additional names supposed to be defined in builtins. Remember that
|
||||
# you should avoid defining new builtins when possible.
|
||||
additional-builtins=
|
||||
|
||||
# Tells whether unused global variables should be treated as a violation.
|
||||
allow-global-unused-variables=yes
|
||||
|
||||
# List of strings which can identify a callback function by name. A callback
|
||||
# name must start or end with one of those strings.
|
||||
callbacks=cb_,
|
||||
_cb
|
||||
|
||||
# A regular expression matching the name of dummy variables (i.e. expected to
|
||||
# not be used).
|
||||
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
|
||||
|
||||
# Argument names that match this expression will be ignored. Default to name
|
||||
# with leading underscore.
|
||||
ignored-argument-names=_.*|^ignored_|^unused_
|
||||
|
||||
# Tells whether we should check for unused import in __init__ files.
|
||||
init-import=no
|
||||
|
||||
# List of qualified module names which can have objects that can redefine
|
||||
# builtins.
|
||||
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
|
||||
|
||||
|
||||
[BASIC]
|
||||
|
||||
# Naming style matching correct argument names.
|
||||
argument-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct argument names. Overrides argument-
|
||||
# naming-style.
|
||||
#argument-rgx=
|
||||
|
||||
# Naming style matching correct attribute names.
|
||||
attr-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct attribute names. Overrides attr-naming-
|
||||
# style.
|
||||
#attr-rgx=
|
||||
|
||||
# Bad variable names which should always be refused, separated by a comma.
|
||||
bad-names=foo,
|
||||
bar,
|
||||
baz,
|
||||
toto,
|
||||
tutu,
|
||||
tata
|
||||
|
||||
# Naming style matching correct class attribute names.
|
||||
class-attribute-naming-style=any
|
||||
|
||||
# Regular expression matching correct class attribute names. Overrides class-
|
||||
# attribute-naming-style.
|
||||
#class-attribute-rgx=
|
||||
|
||||
# Naming style matching correct class names.
|
||||
class-naming-style=PascalCase
|
||||
|
||||
# Regular expression matching correct class names. Overrides class-naming-
|
||||
# style.
|
||||
#class-rgx=
|
||||
|
||||
# Naming style matching correct constant names.
|
||||
const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct constant names. Overrides const-naming-
|
||||
# style.
|
||||
#const-rgx=
|
||||
|
||||
# Minimum line length for functions/classes that require docstrings, shorter
|
||||
# ones are exempt.
|
||||
docstring-min-length=-1
|
||||
|
||||
# Naming style matching correct function names.
|
||||
function-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct function names. Overrides function-
|
||||
# naming-style.
|
||||
#function-rgx=
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma.
|
||||
good-names=f,
|
||||
i,
|
||||
j,
|
||||
k,
|
||||
x,
|
||||
y,
|
||||
z,
|
||||
ex,
|
||||
hd,
|
||||
ra,
|
||||
rv,
|
||||
tf,
|
||||
zf,
|
||||
Run,
|
||||
_
|
||||
|
||||
# Include a hint for the correct naming format with invalid-name.
|
||||
include-naming-hint=no
|
||||
|
||||
# Naming style matching correct inline iteration names.
|
||||
inlinevar-naming-style=any
|
||||
|
||||
# Regular expression matching correct inline iteration names. Overrides
|
||||
# inlinevar-naming-style.
|
||||
#inlinevar-rgx=
|
||||
|
||||
# Naming style matching correct method names.
|
||||
method-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct method names. Overrides method-naming-
|
||||
# style.
|
||||
#method-rgx=
|
||||
|
||||
# Naming style matching correct module names.
|
||||
module-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct module names. Overrides module-naming-
|
||||
# style.
|
||||
#module-rgx=
|
||||
|
||||
# Colon-delimited sets of names that determine each other's naming style when
|
||||
# the name regexes allow several styles.
|
||||
name-group=
|
||||
|
||||
# Regular expression which should only match function or class names that do
|
||||
# not require a docstring.
|
||||
no-docstring-rgx=^_
|
||||
|
||||
# List of decorators that produce properties, such as abc.abstractproperty. Add
|
||||
# to this list to register other decorators that produce valid properties.
|
||||
# These decorators are taken in consideration only for invalid-name.
|
||||
property-classes=abc.abstractproperty
|
||||
|
||||
# Naming style matching correct variable names.
|
||||
variable-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct variable names. Overrides variable-
|
||||
# naming-style.
|
||||
#variable-rgx=
|
||||
|
||||
|
||||
[LOGGING]
|
||||
|
||||
# Format style used to check logging format string. `old` means using %
|
||||
# formatting, `new` is for `{}` formatting,and `fstr` is for f-strings.
|
||||
logging-format-style=old
|
||||
|
||||
# Logging modules to check that the string format arguments are in logging
|
||||
# function parameter format.
|
||||
logging-modules=logging
|
||||
|
||||
|
||||
[SPELLING]
|
||||
|
||||
# Limits count of emitted suggestions for spelling mistakes.
|
||||
max-spelling-suggestions=4
|
||||
|
||||
# Spelling dictionary name. Available dictionaries: none. To make it work,
|
||||
# install the python-enchant package.
|
||||
spelling-dict=
|
||||
|
||||
# List of comma separated words that should not be checked.
|
||||
spelling-ignore-words=
|
||||
|
||||
# A path to a file that contains the private dictionary; one word per line.
|
||||
spelling-private-dict-file=
|
||||
|
||||
# Tells whether to store unknown words to the private dictionary (see the
|
||||
# --spelling-private-dict-file option) instead of raising a message.
|
||||
spelling-store-unknown-words=no
|
||||
|
||||
|
||||
[TYPECHECK]
|
||||
|
||||
# List of decorators that produce context managers, such as
|
||||
# contextlib.contextmanager. Add to this list to register other decorators that
|
||||
# produce valid context managers.
|
||||
contextmanager-decorators=contextlib.contextmanager
|
||||
|
||||
# List of members which are set dynamically and missed by pylint inference
|
||||
# system, and so shouldn't trigger E1101 when accessed. Python regular
|
||||
# expressions are accepted.
|
||||
generated-members=
|
||||
|
||||
# Tells whether missing members accessed in mixin class should be ignored. A
|
||||
# mixin class is detected if its name ends with "mixin" (case insensitive).
|
||||
ignore-mixin-members=yes
|
||||
|
||||
# Tells whether to warn about missing members when the owner of the attribute
|
||||
# is inferred to be None.
|
||||
ignore-none=yes
|
||||
|
||||
# This flag controls whether pylint should warn about no-member and similar
|
||||
# checks whenever an opaque object is returned when inferring. The inference
|
||||
# can return multiple potential results while evaluating a Python object, but
|
||||
# some branches might not be evaluated, which results in partial inference. In
|
||||
# that case, it might be useful to still emit no-member and other checks for
|
||||
# the rest of the inferred objects.
|
||||
ignore-on-opaque-inference=yes
|
||||
|
||||
# List of class names for which member attributes should not be checked (useful
|
||||
# for classes with dynamically set attributes). This supports the use of
|
||||
# qualified names.
|
||||
ignored-classes=optparse.Values,thread._local,_thread._local,astropy.units
|
||||
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis). It
|
||||
# supports qualified module names, as well as Unix pattern matching.
|
||||
ignored-modules=
|
||||
|
||||
# Show a hint with possible names when a member name was not found. The aspect
|
||||
# of finding the hint is based on edit distance.
|
||||
missing-member-hint=yes
|
||||
|
||||
# The minimum edit distance a name should have in order to be considered a
|
||||
# similar match for a missing member name.
|
||||
missing-member-hint-distance=1
|
||||
|
||||
# The total number of similar names that should be taken in consideration when
|
||||
# showing a hint for a missing member.
|
||||
missing-member-max-choices=1
|
||||
|
||||
# List of decorators that change the signature of a decorated function.
|
||||
signature-mutators=
|
||||
|
||||
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=FIXME,
|
||||
XXX,
|
||||
TODO
|
||||
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||
expected-line-ending-format=
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
# Number of spaces of indent required inside a hanging or continued line.
|
||||
indent-after-paren=4
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab).
|
||||
indent-string=' '
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=100
|
||||
|
||||
# Maximum number of lines in a module.
|
||||
max-module-lines=1000
|
||||
|
||||
# List of optional constructs for which whitespace checking is disabled. `dict-
|
||||
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
|
||||
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
|
||||
# `empty-line` allows space-only lines.
|
||||
no-space-check=trailing-comma,
|
||||
dict-separator
|
||||
|
||||
# Allow the body of a class to be on the same line as the declaration if body
|
||||
# contains single statement.
|
||||
single-line-class-stmt=no
|
||||
|
||||
# Allow the body of an if to be on the same line as the test if there is no
|
||||
# else.
|
||||
single-line-if-stmt=no
|
||||
|
||||
|
||||
[CLASSES]
|
||||
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,
|
||||
__new__,
|
||||
setUp,
|
||||
__post_init__
|
||||
|
||||
# List of member names, which should be excluded from the protected access
|
||||
# warning.
|
||||
exclude-protected=_asdict,
|
||||
_fields,
|
||||
_replace,
|
||||
_source,
|
||||
_make
|
||||
|
||||
# List of valid names for the first argument in a class method.
|
||||
valid-classmethod-first-arg=cls
|
||||
|
||||
# List of valid names for the first argument in a metaclass class method.
|
||||
valid-metaclass-classmethod-first-arg=cls
|
||||
|
||||
|
||||
[IMPORTS]
|
||||
|
||||
# List of modules that can be imported at any level, not just the top level
|
||||
# one.
|
||||
allow-any-import-level=
|
||||
|
||||
# Allow wildcard imports from modules that define __all__.
|
||||
allow-wildcard-with-all=no
|
||||
|
||||
# Analyse import fallback blocks. This can be used to support both Python 2 and
|
||||
# 3 compatible code, which means that the block might have code that exists
|
||||
# only in one or another interpreter, leading to false positives when analysed.
|
||||
analyse-fallback-blocks=no
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma.
|
||||
deprecated-modules=optparse,tkinter.tix
|
||||
|
||||
# Create a graph of external dependencies in the given file (report RP0402 must
|
||||
# not be disabled).
|
||||
ext-import-graph=
|
||||
|
||||
# Create a graph of every (i.e. internal and external) dependencies in the
|
||||
# given file (report RP0402 must not be disabled).
|
||||
import-graph=
|
||||
|
||||
# Create a graph of internal dependencies in the given file (report RP0402 must
|
||||
# not be disabled).
|
||||
int-import-graph=
|
||||
|
||||
# Force import order to recognize a module as part of the standard
|
||||
# compatibility libraries.
|
||||
known-standard-library=
|
||||
|
||||
# Force import order to recognize a module as part of a third party library.
|
||||
known-third-party=enchant
|
||||
|
||||
# Couples of modules and preferred modules, separated by a comma.
|
||||
preferred-modules=
|
||||
|
||||
|
||||
[DESIGN]
|
||||
|
||||
# Maximum number of arguments for function / method.
|
||||
max-args=5
|
||||
|
||||
# Maximum number of attributes for a class (see R0902).
|
||||
max-attributes=7
|
||||
|
||||
# Maximum number of boolean expressions in an if statement (see R0916).
|
||||
max-bool-expr=5
|
||||
|
||||
# Maximum number of branch for function / method body.
|
||||
max-branches=12
|
||||
|
||||
# Maximum number of locals for function / method body.
|
||||
max-locals=15
|
||||
|
||||
# Maximum number of parents for a class (see R0901).
|
||||
max-parents=7
|
||||
|
||||
# Maximum number of public methods for a class (see R0904).
|
||||
max-public-methods=20
|
||||
|
||||
# Maximum number of return / yield for function / method body.
|
||||
max-returns=6
|
||||
|
||||
# Maximum number of statements in function / method body.
|
||||
max-statements=50
|
||||
|
||||
# Minimum number of public methods for a class (see R0903).
|
||||
min-public-methods=2
|
||||
|
||||
|
||||
[EXCEPTIONS]
|
||||
|
||||
# Exceptions that will emit a warning when being caught. Defaults to
|
||||
# "BaseException, Exception".
|
||||
overgeneral-exceptions=BaseException,
|
||||
Exception
|
|
@ -1,50 +0,0 @@
|
|||
# Changelog
|
||||
|
||||
## v1.0.4
|
||||
|
||||
* Prefer XHIP distances where the relative parallax error is smaller than the
|
||||
Gaia value.
|
||||
* Fix Pylint warnings.
|
||||
* Refactor common parsing functionality.
|
||||
|
||||
## v1.0.3.1
|
||||
|
||||
* Update reference list for v1.0.3 changes.
|
||||
|
||||
## v1.0.3
|
||||
|
||||
* Use Tycho-HD cross index (VizieR IV/25) to provide HD identifications for
|
||||
TYC stars instead of ASCC.
|
||||
* Preferentially use the HD identifications in the SAO catalogue as the basis
|
||||
for the SAO cross-index.
|
||||
* Merge missing data when combining HIP and TYC subsets
|
||||
|
||||
## v1.0.2
|
||||
|
||||
* Fix an issue where missing spectral types were being converted to O.
|
||||
|
||||
## v1.0.1
|
||||
|
||||
* Remove duplicate HIP/TYC entries matched to the same Gaia DR2 ID.
|
||||
* Use ASCC spectral types when Tycho-2 spectral type catalogue does not
|
||||
contain an entry.
|
||||
* Use New spectral types for Tycho-2 stars to correct some XHIP spectral
|
||||
types.
|
||||
* Cross-index SAO with stars in Tycho-2 supplements.
|
||||
|
||||
## v1.0.0
|
||||
|
||||
* More comprehensive HIP and TYC cross-matching (see README for references).
|
||||
* Various code improvements.
|
||||
|
||||
## v0.1.2-alpha
|
||||
|
||||
* Fixes for duplicates in star database and cross-index files.
|
||||
|
||||
## v0.1.1-alpha
|
||||
|
||||
* Initial release with license files included
|
||||
|
||||
## v0.1.0-alpha
|
||||
|
||||
* Initial release
|
|
@ -1,339 +0,0 @@
|
|||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 2, June 1991
|
||||
|
||||
Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The licenses for most software are designed to take away your
|
||||
freedom to share and change it. By contrast, the GNU General Public
|
||||
License is intended to guarantee your freedom to share and change free
|
||||
software--to make sure the software is free for all its users. This
|
||||
General Public License applies to most of the Free Software
|
||||
Foundation's software and to any other program whose authors commit to
|
||||
using it. (Some other Free Software Foundation software is covered by
|
||||
the GNU Lesser General Public License instead.) You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
this service if you wish), that you receive source code or can get it
|
||||
if you want it, that you can change the software or use pieces of it
|
||||
in new free programs; and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to make restrictions that forbid
|
||||
anyone to deny you these rights or to ask you to surrender the rights.
|
||||
These restrictions translate to certain responsibilities for you if you
|
||||
distribute copies of the software, or if you modify it.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must give the recipients all the rights that
|
||||
you have. You must make sure that they, too, receive or can get the
|
||||
source code. And you must show them these terms so they know their
|
||||
rights.
|
||||
|
||||
We protect your rights with two steps: (1) copyright the software, and
|
||||
(2) offer you this license which gives you legal permission to copy,
|
||||
distribute and/or modify the software.
|
||||
|
||||
Also, for each author's protection and ours, we want to make certain
|
||||
that everyone understands that there is no warranty for this free
|
||||
software. If the software is modified by someone else and passed on, we
|
||||
want its recipients to know that what they have is not the original, so
|
||||
that any problems introduced by others will not reflect on the original
|
||||
authors' reputations.
|
||||
|
||||
Finally, any free program is threatened constantly by software
|
||||
patents. We wish to avoid the danger that redistributors of a free
|
||||
program will individually obtain patent licenses, in effect making the
|
||||
program proprietary. To prevent this, we have made it clear that any
|
||||
patent must be licensed for everyone's free use or not licensed at all.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
|
||||
|
||||
0. This License applies to any program or other work which contains
|
||||
a notice placed by the copyright holder saying it may be distributed
|
||||
under the terms of this General Public License. The "Program", below,
|
||||
refers to any such program or work, and a "work based on the Program"
|
||||
means either the Program or any derivative work under copyright law:
|
||||
that is to say, a work containing the Program or a portion of it,
|
||||
either verbatim or with modifications and/or translated into another
|
||||
language. (Hereinafter, translation is included without limitation in
|
||||
the term "modification".) Each licensee is addressed as "you".
|
||||
|
||||
Activities other than copying, distribution and modification are not
|
||||
covered by this License; they are outside its scope. The act of
|
||||
running the Program is not restricted, and the output from the Program
|
||||
is covered only if its contents constitute a work based on the
|
||||
Program (independent of having been made by running the Program).
|
||||
Whether that is true depends on what the Program does.
|
||||
|
||||
1. You may copy and distribute verbatim copies of the Program's
|
||||
source code as you receive it, in any medium, provided that you
|
||||
conspicuously and appropriately publish on each copy an appropriate
|
||||
copyright notice and disclaimer of warranty; keep intact all the
|
||||
notices that refer to this License and to the absence of any warranty;
|
||||
and give any other recipients of the Program a copy of this License
|
||||
along with the Program.
|
||||
|
||||
You may charge a fee for the physical act of transferring a copy, and
|
||||
you may at your option offer warranty protection in exchange for a fee.
|
||||
|
||||
2. You may modify your copy or copies of the Program or any portion
|
||||
of it, thus forming a work based on the Program, and copy and
|
||||
distribute such modifications or work under the terms of Section 1
|
||||
above, provided that you also meet all of these conditions:
|
||||
|
||||
a) You must cause the modified files to carry prominent notices
|
||||
stating that you changed the files and the date of any change.
|
||||
|
||||
b) You must cause any work that you distribute or publish, that in
|
||||
whole or in part contains or is derived from the Program or any
|
||||
part thereof, to be licensed as a whole at no charge to all third
|
||||
parties under the terms of this License.
|
||||
|
||||
c) If the modified program normally reads commands interactively
|
||||
when run, you must cause it, when started running for such
|
||||
interactive use in the most ordinary way, to print or display an
|
||||
announcement including an appropriate copyright notice and a
|
||||
notice that there is no warranty (or else, saying that you provide
|
||||
a warranty) and that users may redistribute the program under
|
||||
these conditions, and telling the user how to view a copy of this
|
||||
License. (Exception: if the Program itself is interactive but
|
||||
does not normally print such an announcement, your work based on
|
||||
the Program is not required to print an announcement.)
|
||||
|
||||
These requirements apply to the modified work as a whole. If
|
||||
identifiable sections of that work are not derived from the Program,
|
||||
and can be reasonably considered independent and separate works in
|
||||
themselves, then this License, and its terms, do not apply to those
|
||||
sections when you distribute them as separate works. But when you
|
||||
distribute the same sections as part of a whole which is a work based
|
||||
on the Program, the distribution of the whole must be on the terms of
|
||||
this License, whose permissions for other licensees extend to the
|
||||
entire whole, and thus to each and every part regardless of who wrote it.
|
||||
|
||||
Thus, it is not the intent of this section to claim rights or contest
|
||||
your rights to work written entirely by you; rather, the intent is to
|
||||
exercise the right to control the distribution of derivative or
|
||||
collective works based on the Program.
|
||||
|
||||
In addition, mere aggregation of another work not based on the Program
|
||||
with the Program (or with a work based on the Program) on a volume of
|
||||
a storage or distribution medium does not bring the other work under
|
||||
the scope of this License.
|
||||
|
||||
3. You may copy and distribute the Program (or a work based on it,
|
||||
under Section 2) in object code or executable form under the terms of
|
||||
Sections 1 and 2 above provided that you also do one of the following:
|
||||
|
||||
a) Accompany it with the complete corresponding machine-readable
|
||||
source code, which must be distributed under the terms of Sections
|
||||
1 and 2 above on a medium customarily used for software interchange; or,
|
||||
|
||||
b) Accompany it with a written offer, valid for at least three
|
||||
years, to give any third party, for a charge no more than your
|
||||
cost of physically performing source distribution, a complete
|
||||
machine-readable copy of the corresponding source code, to be
|
||||
distributed under the terms of Sections 1 and 2 above on a medium
|
||||
customarily used for software interchange; or,
|
||||
|
||||
c) Accompany it with the information you received as to the offer
|
||||
to distribute corresponding source code. (This alternative is
|
||||
allowed only for noncommercial distribution and only if you
|
||||
received the program in object code or executable form with such
|
||||
an offer, in accord with Subsection b above.)
|
||||
|
||||
The source code for a work means the preferred form of the work for
|
||||
making modifications to it. For an executable work, complete source
|
||||
code means all the source code for all modules it contains, plus any
|
||||
associated interface definition files, plus the scripts used to
|
||||
control compilation and installation of the executable. However, as a
|
||||
special exception, the source code distributed need not include
|
||||
anything that is normally distributed (in either source or binary
|
||||
form) with the major components (compiler, kernel, and so on) of the
|
||||
operating system on which the executable runs, unless that component
|
||||
itself accompanies the executable.
|
||||
|
||||
If distribution of executable or object code is made by offering
|
||||
access to copy from a designated place, then offering equivalent
|
||||
access to copy the source code from the same place counts as
|
||||
distribution of the source code, even though third parties are not
|
||||
compelled to copy the source along with the object code.
|
||||
|
||||
4. You may not copy, modify, sublicense, or distribute the Program
|
||||
except as expressly provided under this License. Any attempt
|
||||
otherwise to copy, modify, sublicense or distribute the Program is
|
||||
void, and will automatically terminate your rights under this License.
|
||||
However, parties who have received copies, or rights, from you under
|
||||
this License will not have their licenses terminated so long as such
|
||||
parties remain in full compliance.
|
||||
|
||||
5. You are not required to accept this License, since you have not
|
||||
signed it. However, nothing else grants you permission to modify or
|
||||
distribute the Program or its derivative works. These actions are
|
||||
prohibited by law if you do not accept this License. Therefore, by
|
||||
modifying or distributing the Program (or any work based on the
|
||||
Program), you indicate your acceptance of this License to do so, and
|
||||
all its terms and conditions for copying, distributing or modifying
|
||||
the Program or works based on it.
|
||||
|
||||
6. Each time you redistribute the Program (or any work based on the
|
||||
Program), the recipient automatically receives a license from the
|
||||
original licensor to copy, distribute or modify the Program subject to
|
||||
these terms and conditions. You may not impose any further
|
||||
restrictions on the recipients' exercise of the rights granted herein.
|
||||
You are not responsible for enforcing compliance by third parties to
|
||||
this License.
|
||||
|
||||
7. If, as a consequence of a court judgment or allegation of patent
|
||||
infringement or for any other reason (not limited to patent issues),
|
||||
conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot
|
||||
distribute so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you
|
||||
may not distribute the Program at all. For example, if a patent
|
||||
license would not permit royalty-free redistribution of the Program by
|
||||
all those who receive copies directly or indirectly through you, then
|
||||
the only way you could satisfy both it and this License would be to
|
||||
refrain entirely from distribution of the Program.
|
||||
|
||||
If any portion of this section is held invalid or unenforceable under
|
||||
any particular circumstance, the balance of the section is intended to
|
||||
apply and the section as a whole is intended to apply in other
|
||||
circumstances.
|
||||
|
||||
It is not the purpose of this section to induce you to infringe any
|
||||
patents or other property right claims or to contest validity of any
|
||||
such claims; this section has the sole purpose of protecting the
|
||||
integrity of the free software distribution system, which is
|
||||
implemented by public license practices. Many people have made
|
||||
generous contributions to the wide range of software distributed
|
||||
through that system in reliance on consistent application of that
|
||||
system; it is up to the author/donor to decide if he or she is willing
|
||||
to distribute software through any other system and a licensee cannot
|
||||
impose that choice.
|
||||
|
||||
This section is intended to make thoroughly clear what is believed to
|
||||
be a consequence of the rest of this License.
|
||||
|
||||
8. If the distribution and/or use of the Program is restricted in
|
||||
certain countries either by patents or by copyrighted interfaces, the
|
||||
original copyright holder who places the Program under this License
|
||||
may add an explicit geographical distribution limitation excluding
|
||||
those countries, so that distribution is permitted only in or among
|
||||
countries not thus excluded. In such case, this License incorporates
|
||||
the limitation as if written in the body of this License.
|
||||
|
||||
9. The Free Software Foundation may publish revised and/or new versions
|
||||
of the General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the Program
|
||||
specifies a version number of this License which applies to it and "any
|
||||
later version", you have the option of following the terms and conditions
|
||||
either of that version or of any later version published by the Free
|
||||
Software Foundation. If the Program does not specify a version number of
|
||||
this License, you may choose any version ever published by the Free Software
|
||||
Foundation.
|
||||
|
||||
10. If you wish to incorporate parts of the Program into other free
|
||||
programs whose distribution conditions are different, write to the author
|
||||
to ask for permission. For software which is copyrighted by the Free
|
||||
Software Foundation, write to the Free Software Foundation; we sometimes
|
||||
make exceptions for this. Our decision will be guided by the two goals
|
||||
of preserving the free status of all derivatives of our free software and
|
||||
of promoting the sharing and reuse of software generally.
|
||||
|
||||
NO WARRANTY
|
||||
|
||||
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
|
||||
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
|
||||
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
|
||||
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
|
||||
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
|
||||
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
|
||||
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
|
||||
REPAIR OR CORRECTION.
|
||||
|
||||
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
|
||||
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
|
||||
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
|
||||
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
|
||||
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
|
||||
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
|
||||
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
|
||||
POSSIBILITY OF SUCH DAMAGES.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
convey the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation; either version 2 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License along
|
||||
with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program is interactive, make it output a short notice like this
|
||||
when it starts in an interactive mode:
|
||||
|
||||
Gnomovision version 69, Copyright (C) year name of author
|
||||
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, the commands you use may
|
||||
be called something other than `show w' and `show c'; they could even be
|
||||
mouse-clicks or menu items--whatever suits your program.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or your
|
||||
school, if any, to sign a "copyright disclaimer" for the program, if
|
||||
necessary. Here is a sample; alter the names:
|
||||
|
||||
Yoyodyne, Inc., hereby disclaims all copyright interest in the program
|
||||
`Gnomovision' (which makes passes at compilers) written by James Hacker.
|
||||
|
||||
<signature of Ty Coon>, 1 April 1989
|
||||
Ty Coon, President of Vice
|
||||
|
||||
This General Public License does not permit incorporating your program into
|
||||
proprietary programs. If your program is a subroutine library, you may
|
||||
consider it more useful to permit linking proprietary applications with the
|
||||
library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License.
|
|
@ -1,207 +0,0 @@
|
|||
Gaia DR2 for celestia.Sci/Celestia
|
||||
==================================
|
||||
|
||||
This repository contains Python scripts to generate a celestia.Sci/Celestia
|
||||
star database from the *Gaia* Data Release 2 data, supplemented by the XHIP
|
||||
catalogue.
|
||||
|
||||
In order to limit the download size required and to maintain compatibility
|
||||
with the use of HIP/TYC2 identifiers as the primary key for stars in
|
||||
celestia.Sci/Celestia, only Gaia data for HIP and TYC2 stars is processed.
|
||||
|
||||
**Unless you are editing the code, it is recommended to use the pre-built
|
||||
files in the release rather than running these scripts manually.** The data
|
||||
files in the release may be used under a CC-BY-SA 4.0 license
|
||||
(https://creativecommons.org/licenses/by-sa/4.0/legalcode)
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Internet connection for downloading the data
|
||||
- *Gaia* archive account (https://gea.esac.esa.int/archive/)
|
||||
- Python 3.6 or higher (preferably 64-bit, as the memory usage can be quite
|
||||
high)
|
||||
- celestia.Sci/Celestia
|
||||
|
||||
## Folder contents
|
||||
|
||||
- `download_data.py`: script to download the data files
|
||||
- `make_stardb.py`: script to build the star database and cross-index files
|
||||
|
||||
## How to use
|
||||
|
||||
Please ensure you have read through the Prerequisites section and all the
|
||||
steps below **before** you begin.
|
||||
|
||||
1. Clone or download this repository
|
||||
2. Open a command window in the repository directory
|
||||
3. Set up a Python 3 virtual environment
|
||||
|
||||
`python3 -m venv myenv`
|
||||
|
||||
4. Switch to the virtual environment
|
||||
|
||||
`source myenv/bin/activate`
|
||||
|
||||
5. Install the requirements:
|
||||
|
||||
`python -m pip install -r requirements.txt`
|
||||
|
||||
6. Run the download script. You will need your *Gaia* archive login.
|
||||
**This step may take several hours!**
|
||||
|
||||
python download_data.py
|
||||
|
||||
7. Run the build script.
|
||||
|
||||
python make_stardb.py
|
||||
|
||||
8. The stars.dat, hdxindex.dat and saoxindex.dat files will be written into
|
||||
the output folder
|
||||
|
||||
9. Copy the files into the `data` folder of the celestia.Sci/Celestia
|
||||
distribution.
|
||||
|
||||
## References
|
||||
|
||||
### Source catalogues
|
||||
|
||||
- *Gaia* Data Release 2 (https://gea.esac.esa.int/archive/)
|
||||
- *Gaia* Collaboration et al. (2016), A&A 595, id.A1, "The *Gaia* mission"
|
||||
- *Gaia* Collaboration et al. (2018), A&A 616, id.A1, "*Gaia* Data
|
||||
Release 2. Summary of the contents and survey properties"
|
||||
- Andrae et al. (2018), A&A 616, id.A8, "*Gaia* Data Release 2. First
|
||||
stellar parameters from Apsis"
|
||||
- Marrese et al. (2018), A&A 621, id.A144, "*Gaia* Data Release 2.
|
||||
Cross-match with external catalogues: algorithms and results"
|
||||
|
||||
- *Gaia* Data Release 2 Geometric Distances
|
||||
(http://www.mpia.de/~calj/gdr2_distances/main.html)
|
||||
|
||||
Bailer-Jones et al. (2018), AJ 156(2), id.58 "Estimating Distance from
|
||||
Parallaxes. IV. Distances to 1.33 Billion Stars in *Gaia* Data Release 2"
|
||||
|
||||
- Binarity of Hipparcos stars from Gaia pm anomaly
|
||||
(https://cdsarc.unistra.fr/viz-bin/cat/J/A%2bA/623/A72)
|
||||
|
||||
Kervella et al. (2019), A&A 623, id.A72 "Stellar and substellar companions
|
||||
of nearby stars from Gaia DR2. Binarity from proper motion anomaly"
|
||||
|
||||
- Extended Hipparcos Compilation (XHIP)
|
||||
(http://cdsarc.u-strasbg.fr/viz-bin/cat/V/137D)
|
||||
|
||||
Anderson & Francis (2012), AstL 38(5), pp.331–346 "XHIP: An extended
|
||||
Hipparcos compilation"
|
||||
|
||||
- ASCC-2.5, 3rd version (http://cdsarc.u-strasbg.fr/viz-bin/cat/I/280B)
|
||||
|
||||
Kharchenko (2001), Kinematika i Fizika Nebesnykh Tel 17(5), pp.409-423
|
||||
"All-sky compiled catalogue of 2.5 million stars"
|
||||
|
||||
- HD identifications for Tycho-2 stars
|
||||
(https://cdsarc.unistra.fr/viz-bin/cat/IV/25)
|
||||
|
||||
Fabricius et al. (2002), A&A 386, pp.709–710 "Henry Draper catalogue
|
||||
identifications for Tycho-2 stars"
|
||||
|
||||
- Teff and metallicities for Tycho-2 stars
|
||||
(http://cdsarc.u-strasbg.fr/viz-bin/cat/V/136)
|
||||
|
||||
Ammons et al. (2006), ApJ 638(2), pp.1004–1017 "The N2K Consortium. IV. New
|
||||
Temperatures and Metallicities for More than 100,000 FGK Dwarfs"
|
||||
|
||||
- The Tycho-2 Spectral Type Catalog
|
||||
(http://cdsarc.u-strasbg.fr/viz-bin/cat/III/231)
|
||||
|
||||
Wright et al. (2003), AJ 125(1), pp.359–363 "The Tycho-2 Spectral Type
|
||||
Catalog"
|
||||
|
||||
- New spectral types for Tycho2 stars
|
||||
(https://cdsarc.unistra.fr/viz-bin/cat/J/PAZh/34/21)
|
||||
|
||||
Tsvetkov et al. (2008), Astronomy Letters 34(1), pp.17–27 "Inaccuracies in
|
||||
the spectral classification of stars from the Tycho-2 Spectral Type
|
||||
Catalogue"
|
||||
|
||||
- SAO Star Catalog J2000 (http://cdsarc.u-strasbg.fr/viz-bin/cat/I/131A)
|
||||
|
||||
SAO Staff, "Smithsonian Astrophysical Observatory Star Catalog (1990)"
|
||||
|
||||
### Additional catalogues of interest
|
||||
|
||||
- The Hipparcos and Tycho Catalogues (1997)
|
||||
(http://cdsarc.u-strasbg.fr/viz-bin/cat/I/239)
|
||||
- Perryman et al. (1997), A&A 500 pp.501–504 "The Hipparcos Catalogue"
|
||||
- Høg et al. (1997), A&A 323 pp.L57–L60 "The TYCHO Catalogue"
|
||||
|
||||
- Hipparcos, the New Reduction (http://cdsarc.u-strasbg.fr/viz-bin/cat/I/311)
|
||||
|
||||
van Leeuwen (2007), A&A 474(2) pp.653–664 "Validation of the new Hipparcos
|
||||
reduction"
|
||||
|
||||
- The Tycho-2 Catalogue (http://cdsarc.u-strasbg.fr/viz-bin/cat/I/259)
|
||||
|
||||
Høg et al. (2000), A&A 355 pp.L27–L30 "The Tycho-2 catalogue of the 2.5
|
||||
million brightest stars"
|
||||
|
||||
- Henry Draper Catalogue and Extension
|
||||
(http://cdsarc.u-strasbg.fr/viz-bin/cat/III/135A)
|
||||
|
||||
Cannon & Pickering (1918–1924), Annals of the Astronomical Observatory of
|
||||
Harvard College
|
||||
|
||||
### Data processing
|
||||
|
||||
- UBVRIJHK color-temperature calibration
|
||||
(http://cdsarc.u-strasbg.fr/viz-bin/cat/J/ApJS/193/1)
|
||||
|
||||
Worthey & Lee (2011), ApJS, 193(1), id.1 "An Empirical UBV RI JHK
|
||||
Color-Temperature Calibration for Stars"
|
||||
|
||||
- Bailer-Jones (2015), PASP 127(956), pp.994 "Estimating Distances from
|
||||
Parallaxes"
|
||||
|
||||
- Astraatmadja & Bailer-Jones (2016), ApJ 833(1), id.119 "Estimating
|
||||
Distances from Parallaxes. III. Distances of Two Million Stars in the
|
||||
*Gaia* DR1 Catalogue"
|
||||
|
||||
- Oliphant (2006), USA: Trelgol Publishing "A guide to NumPy"
|
||||
|
||||
- van der Walt et al. (2011), Computing in Science & Engineering, 13, 22–30
|
||||
"The NumPy Array: A Structure for Efficient Numerical Computation"
|
||||
|
||||
- Harris et al. (2020), Nature 585, 357–362 "Array programming with NumPy"
|
||||
|
||||
- Astropy Collaboration et al. (2013), A&A 558, id.A33 "Astropy: A community
|
||||
Python package for astronomy"
|
||||
|
||||
- Astropy Collaboration et al. (2018), AJ 156(3), id.123 "The Astropy Project:
|
||||
Building an Open-science Project and Status of the v2.0 Core Package"
|
||||
|
||||
### Databases
|
||||
|
||||
- Wenger et al. (2000), A&A 143, 9–22 "The SIMBAD astronomical database. The
|
||||
CDS reference database for astronomical objects"
|
||||
|
||||
- Ochsenbein et al. (2000), A&AS 143, 23–32 "The VizieR database of
|
||||
astronomical catalogues"
|
||||
|
||||
## Acknowledgements
|
||||
|
||||
This work has made use of data from the European Space Agency (ESA) mission
|
||||
*Gaia* (https://www.cosmos.esa.int/gaia), processed by the *Gaia* Data
|
||||
Processing and Analysis Consortium (DPAC,
|
||||
https://www.cosmos.esa.int/web/gaia/dpac/consortium). Funding for the DPAC has
|
||||
been provided by national institutions, in particular the institutions
|
||||
participating in the *Gaia* Multilateral Agreement.
|
||||
|
||||
This work has made use of the SIMBAD database, operated at CDS, Strasbourg,
|
||||
France.
|
||||
|
||||
This work has made use of the VizieR catalogue access tool, CDS, Strasbourg,
|
||||
France (DOI : 10.26093/cds/vizier). The original description of the VizieR
|
||||
service was published in 2000, A&AS 143, 23.
|
||||
|
||||
This work made use of the cross-match service provided by CDS, Strasbourg.
|
||||
|
||||
This work made use of [Astropy](http://www.astropy.org), a community-developed
|
||||
core Python package for Astronomy.
|
|
@ -1,329 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# gaia-stardb: Processing Gaia DR2 for celestia.Sci/Celestia
|
||||
# Copyright (C) 2019–2020 Andrew Tribick
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
"""Routines for downloading the data files."""
|
||||
|
||||
import contextlib
|
||||
import getpass
|
||||
import os
|
||||
|
||||
from zipfile import ZipFile
|
||||
|
||||
import numpy as np
|
||||
import requests
|
||||
import astropy.io.ascii as io_ascii
|
||||
import astropy.io.votable as votable
|
||||
|
||||
from astropy import units
|
||||
from astropy.table import Table, join, unique, vstack
|
||||
from astroquery.gaia import Gaia
|
||||
from astroquery.utils.tap import Tap
|
||||
from astroquery.xmatch import XMatch
|
||||
|
||||
from parse_utils import open_cds_tarfile
|
||||
|
||||
def yesno(prompt: str, default: bool = False) -> bool:
    """Ask the user a yes/no question and return the answer as a bool.

    An empty response selects ``default``; only y/Y and n/N are accepted
    otherwise, and any other input re-prompts.
    """
    suffix = ' (Y/n): ' if default else ' (y/N): '
    full_prompt = prompt + suffix

    while True:
        reply = input(full_prompt)
        if reply == '':
            return default
        if reply in ('y', 'Y'):
            return True
        if reply in ('n', 'N'):
            return False
|
||||
|
||||
def proceed_checkfile(filename: str) -> bool:
    """Return True if it is OK to (re)create ``filename``.

    If the file already exists, the user is asked whether to replace it;
    answering yes deletes the old file so the caller can write a fresh one,
    answering no returns False so the caller skips the download/build step.
    """
    if os.path.exists(filename):
        # Fix: the prompt previously had no f-string placeholder, so the user
        # was never told which file was about to be replaced.
        if yesno(f'{filename} already exists, replace?'):
            with contextlib.suppress(FileNotFoundError):
                os.remove(filename)
        else:
            return False
    return True
|
||||
|
||||
def download_file(outfile_name: str, url: str) -> bool:
    """Download ``url`` to ``outfile_name``, prompting before overwriting.

    Returns True on success, False if the user declined to replace an
    existing file or the HTTP request failed.
    """
    if not proceed_checkfile(outfile_name):
        # Fix: previously returned None here despite the -> bool annotation.
        return False

    print(f'Downloading {url}')
    response = requests.get(url, stream=True)
    if response.status_code != 200:
        print('Failed to download')
        return False

    with open(outfile_name, 'wb') as f:
        # Fix: response.raw.read() buffered the entire archive in memory,
        # defeating stream=True. Write it out in chunks instead.
        for chunk in response.iter_content(chunk_size=1 << 16):
            f.write(chunk)
    return True
|
||||
|
||||
# --- GAIA DATA DOWNLOAD ---
|
||||
|
||||
def download_gaia_data(colname: str, xindex_table: str, outfile_name: str) -> None:
    """Query and download Gaia data.

    ``xindex_table`` maps external catalogue ids (HIP/TYC) to Gaia
    source_ids; the query joins it to gaiadr2.gaia_source for astrometry
    and photometry, and LEFT JOINs the geometric distance table (distance
    estimates may be missing for some sources). Results are written to
    ``outfile_name`` as CSV, with the external id exposed as ``colname``.
    """
    query = f"""SELECT
    x.source_id, x.original_ext_source_id AS {colname},
    g.ra, g.dec, g.parallax, g.parallax_error, g.pmra,
    g.pmdec, g.phot_g_mean_mag, g.bp_rp, g.teff_val,
    d.r_est, d.r_lo, d.r_hi
FROM
    {xindex_table} x
    JOIN gaiadr2.gaia_source g ON g.source_id = x.source_id
    LEFT JOIN external.gaiadr2_geometric_distance d ON d.source_id = x.source_id"""

    # echoed so the user can see exactly what was submitted to the archive
    print(query)
    job = Gaia.launch_job_async(query,
                                dump_to_file=True,
                                output_file=outfile_name,
                                output_format='csv')
    try:
        job.save_results()
    finally:
        # always clean up the server-side job, even if saving fails
        Gaia.remove_jobs(job.jobid)
|
||||
|
||||
# ESA-published Hipparcos2 -> Gaia DR2 cone-search cross-match archive,
# used to supplement the incomplete hipparcos2_best_neighbour table
CONESEARCH_URL = \
    'https://www.cosmos.esa.int/documents/29201/1769576/Hipparcos2GaiaDR2coneSearch.zip'
|
||||
|
||||
def download_gaia_hip(username: str) -> None:
    """Download HIP data from the Gaia archive.

    Builds a HIP -> Gaia DR2 source_id mapping from two sources (the
    Kervella et al. proper-motion-anomaly table and the ESA cone-search
    file), uploads it as a user table, and downloads the Gaia data for
    those sources into gaia/gaiadr2_hip-result.csv.
    """
    hip_file = os.path.join('gaia', 'gaiadr2_hip-result.csv')
    if not proceed_checkfile(hip_file):
        return

    conesearch_file = os.path.join('gaia', 'hip2conesearch.zip')
    if proceed_checkfile(conesearch_file):
        download_file(conesearch_file, CONESEARCH_URL)

    # the gaiadr2.hipparcos2_best_neighbour table misses a large number of HIP stars that are
    # actually present, so use the mapping from Kervella et al. (2019) "Binarity of Hipparcos
    # stars from Gaia pm anomaly" instead.

    with open_cds_tarfile(os.path.join('vizier', 'hipgpma.tar.gz')) as tf:
        hip_map = unique(tf.read_gzip('hipgpma.dat', ['HIP', 'GDR2']))

    with ZipFile(conesearch_file, 'r') as csz:
        with csz.open('Hipparcos2GaiaDR2coneSearch.csv', 'r') as f:
            # only HIP and GDR2 are kept; the separation column is dropped
            cone_map = io_ascii.read(f,
                                     format='csv',
                                     names=['HIP', 'GDR2', 'dist'],
                                     include_names=['HIP', 'GDR2'])

    cone_map = unique(cone_map)

    # outer join so a HIP entry from either source survives; prefer the
    # pm-anomaly mapping, fall back to the cone-search one where masked
    hip_map = join(hip_map, cone_map, join_type='outer', keys='HIP', table_names=['pm', 'cone'])
    hip_map['GDR2'] = hip_map['GDR2_pm'].filled(hip_map['GDR2_cone'])
    hip_map.remove_columns(['GDR2_pm', 'GDR2_cone'])
    # column names required by the cross-index query in download_gaia_data
    hip_map.rename_column('HIP', 'original_ext_source_id')
    hip_map.rename_column('GDR2', 'source_id')

    Gaia.upload_table(upload_resource=hip_map, table_name='hipgpma')
    try:
        download_gaia_data('hip_id', f'user_{username}.hipgpma', hip_file)
    finally:
        # always drop the temporary user table from the archive
        Gaia.delete_user_table('hipgpma')
|
||||
|
||||
def _load_gaia_tyc_ids(filename: str) -> Table:
    """Parse the ``tyc2_id`` column ("TYC1-TYC2-TYC3") of the downloaded
    Gaia cross-match CSV into a Table of three integer columns."""
    with open(filename, 'r') as f:
        header = f.readline().split(',')
        col_idx = header.index('tyc2_id')
        tyc1 = []
        tyc2 = []
        tyc3 = []
        for line in f:
            try:
                tyc2_id = line.split(',')[col_idx]
            except IndexError:
                # row has fewer columns than the header: skip it
                continue

            tyc = tyc2_id.split('-')
            tyc1.append(int(tyc[0]))
            tyc2.append(int(tyc[1]))
            tyc3.append(int(tyc[2]))

    # i4 matches the TYC column dtype used by the ASCC loader so the
    # tables can be joined
    return Table([tyc1, tyc2, tyc3], names=['TYC1','TYC2','TYC3'], dtype=('i4', 'i4', 'i4'))
|
||||
|
||||
def _load_ascc_tyc_ids(filename: str) -> Table:
    """Read the TYC1/TYC2/TYC3 columns from every cc* data file in the
    ASCC CDS tar archive and stack them into a single Table.

    Returns None if no cc* member was found in the archive.
    """
    data = None
    with open_cds_tarfile(filename) as tf:
        for data_file in tf.tf:
            # only top-level members named cc* contain catalogue data
            sections = os.path.split(data_file.name)
            if len(sections) != 2 or sections[0] != '.' or not sections[1].startswith('cc'):
                continue
            section_data = tf.read_gzip(
                os.path.splitext(sections[1])[0],
                ['TYC1', 'TYC2', 'TYC3'],
                readme_name='cc*.dat')

            if data is None:
                data = section_data
            else:
                # join_type='exact' requires identical columns in all parts
                data = vstack([data, section_data], join_type='exact')

    return data
|
||||
|
||||
def get_missing_tyc_ids(tyc_file: str, ascc_file: str) -> Table:
    """Finds the ASCC TYC ids that are not present in Gaia cross-match.

    Returns a single-column Table ('id') of "TYC n-n-n" strings suitable
    for uploading to the SIMBAD TAP service.
    """
    print("Finding missing TYC ids in ASCC")
    t_asc = unique(_load_ascc_tyc_ids(ascc_file))
    t_gai = _load_gaia_tyc_ids(tyc_file)

    # marker column: present (True) after the join only for matched rows
    t_gai['in_gaia'] = True

    # left join keeps every ASCC id; unmatched rows get a masked marker
    t_mgd = join(t_asc, t_gai, join_type='left')
    t_mgd['in_gaia'] = t_mgd['in_gaia'].filled(False)

    t_missing = t_mgd[np.logical_not(t_mgd['in_gaia'])]
    t_missing = t_missing[t_missing['TYC1'] != 0] # remove invalid entries

    return Table([[f"TYC {t['TYC1']}-{t['TYC2']}-{t['TYC3']}" for t in t_missing]], names=['id'])
|
||||
|
||||
def download_gaia_tyc(username: str) -> None:
    """Download TYC data from the Gaia archive.

    Three stages: (1) fetch Gaia data via the archive's own Tycho-2
    best-neighbour cross-match; (2) query SIMBAD for Gaia DR2 ids of the
    TYC stars that cross-match missed; (3) upload those extra mappings as
    a user table and fetch their Gaia data too.
    """

    tyc_file = os.path.join('gaia', 'gaiadr2_tyc-result.csv')
    if proceed_checkfile(tyc_file):
        download_gaia_data('tyc2_id', 'gaiadr2.tycho2_best_neighbour', tyc_file)

    # Use SIMBAD to fill in some of the missing entries
    with contextlib.suppress(FileExistsError):
        os.mkdir('simbad')

    simbad_file = os.path.join('simbad', 'tyc-gaia.votable')
    if proceed_checkfile(simbad_file):
        ascc_file = os.path.join('vizier', 'ascc.tar.gz')
        missing_ids = get_missing_tyc_ids(tyc_file, ascc_file)
        print("Querying SIMBAD for Gaia DR2 identifiers")
        simbad = Tap(url='http://simbad.u-strasbg.fr:80/simbad/sim-tap')
        # resolve each uploaded TYC id to its SIMBAD object (id1), then
        # pick out that object's "Gaia DR2 ..." identifier (id2)
        query = """SELECT
    id1.id tyc_id, id2.id gaia_id
FROM
    TAP_UPLOAD.missing_tyc src
    JOIN IDENT id1 ON id1.id = src.id
    JOIN IDENT id2 ON id2.oidref = id1.oidref
WHERE
    id2.id LIKE 'Gaia DR2 %'"""
        print(query)
        job = simbad.launch_job_async(query,
                                      upload_resource=missing_ids,
                                      upload_table_name='missing_tyc',
                                      output_file=simbad_file,
                                      output_format='votable',
                                      dump_to_file=True)
        job.save_results()

    tyc2_file = os.path.join('gaia', 'gaiadr2_tyc-result-extra.csv')
    if proceed_checkfile(tyc2_file):
        missing_ids = votable.parse(simbad_file).resources[0].tables[0].to_table()

        # strip the leading catalogue prefix: keep only the text after the
        # last space ("TYC 1-2-3" -> "1-2-3", "Gaia DR2 n" -> n)
        missing_ids['tyc_id'] = [m[m.rfind(' ')+1:] for m in missing_ids['tyc_id'].astype('U')]
        missing_ids.rename_column('tyc_id', 'original_ext_source_id')

        missing_ids['gaia_id'] = [int(m[m.rfind(' ')+1:])
                                  for m in missing_ids['gaia_id'].astype('U')]
        missing_ids.rename_column('gaia_id', 'source_id')

        Gaia.upload_table(upload_resource=missing_ids, table_name='tyc_missing')
        try:
            download_gaia_data('tyc2_id', 'user_'+username+'.tyc_missing', tyc2_file)
        finally:
            # always drop the temporary user table from the archive
            Gaia.delete_user_table('tyc_missing')
|
||||
|
||||
def download_gaia() -> None:
    """Prompt for Gaia archive credentials and fetch the HIP and TYC data.

    Aborts without logging in if either the username or the password is
    left blank; always logs out again afterwards.
    """
    with contextlib.suppress(FileExistsError):
        os.mkdir('gaia')

    print('Login to Gaia Archive')
    username = input('Username: ')
    if not username:
        print('Login aborted')
        return

    password = getpass.getpass('Password: ')
    if not password:
        print('Login aborted')
        return

    Gaia.login(user=username, password=password)
    try:
        for fetch in (download_gaia_hip, download_gaia_tyc):
            fetch(username)
    finally:
        # release the archive session even if a download fails
        Gaia.logout()
|
||||
|
||||
# --- SAO XMATCH DOWNLOAD ---
|
||||
|
||||
def download_xmatch(cat1: str, cat2: str, outfile_name: str) -> None:
    """Download a cross-match from VizieR.

    Queries the CDS X-Match service for pairs within 5 arcsec and writes
    the result to ``outfile_name`` as CSV. Skips the download if the user
    declines to replace an existing file.
    """
    if not proceed_checkfile(outfile_name):
        return

    result = XMatch.query(cat1=cat1,
                          cat2=cat2,
                          max_distance=5 * units.arcsec)

    io_ascii.write(result, outfile_name, format='csv')
|
||||
|
||||
def download_sao_xmatch() -> None:
    """Download cross-matches to the SAO catalogue.

    Fetches SAO-to-HIP2 and SAO-to-Tycho-2 (main plus both supplements)
    cross-matches from the CDS X-Match service into the xmatch folder.
    """
    with contextlib.suppress(FileExistsError):
        os.mkdir('xmatch')

    match_specs = (
        ('vizier:I/131A/sao', 'vizier:I/311/hip2', 'sao_hip_xmatch.csv'),
        ('vizier:I/131A/sao', 'vizier:I/259/tyc2', 'sao_tyc2_xmatch.csv'),
        ('vizier:I/131A/sao', 'vizier:I/259/suppl_1', 'sao_tyc2_suppl1_xmatch.csv'),
        ('vizier:I/131A/sao', 'vizier:I/259/suppl_2', 'sao_tyc2_suppl2_xmatch.csv'),
    )

    for cat1, cat2, out_name in match_specs:
        print(f'Downloading {cat1}-{cat2} crossmatch')
        download_xmatch(cat1, cat2, os.path.join('xmatch', out_name))
|
||||
|
||||
# --- VIZIER DOWNLOAD ---
|
||||
def download_vizier() -> None:
    """Download catalogue archive files from VizieR into the vizier folder."""
    with contextlib.suppress(FileExistsError):
        os.mkdir('vizier')

    downloads = (
        ('ascc.tar.gz', 'http://cdsarc.u-strasbg.fr/viz-bin/nph-Cat/tar.gz?I/280B'),
        ('hipgpma.tar.gz', 'https://cdsarc.unistra.fr/viz-bin/nph-Cat/tar.gz?J/A+A/623/A72'),
        # for some reason, the SAO archive at VizieR does not work, so download files individually
        ('sao.dat.gz', 'https://cdsarc.unistra.fr/ftp/I/131A/sao.dat.gz'),
        ('sao.readme', 'https://cdsarc.unistra.fr/ftp/I/131A/ReadMe'),
        ('tyc2hd.tar.gz', 'https://cdsarc.unistra.fr/viz-bin/nph-Cat/tar.gz?IV/25'),
        ('tyc2spec.tar.gz', 'http://cdsarc.u-strasbg.fr/viz-bin/nph-Cat/tar.gz?III/231'),
        ('tyc2specnew.tar.gz', 'https://cdsarc.unistra.fr/viz-bin/nph-Cat/tar.gz?J/PAZh/34/21'),
        ('tyc2teff.tar.gz', 'http://cdsarc.u-strasbg.fr/viz-bin/nph-Cat/tar.gz?V/136'),
        ('ubvriteff.tar.gz', 'http://cdsarc.u-strasbg.fr/viz-bin/nph-Cat/tar.gz?J/ApJS/193/1'),
        ('xhip.tar.gz', 'http://cdsarc.u-strasbg.fr/viz-bin/nph-Cat/tar.gz?V/137D'),
    )

    for out_name, source_url in downloads:
        download_file(os.path.join('vizier', out_name), source_url)
|
||||
|
||||
if __name__ == "__main__":
|
||||
download_vizier()
|
||||
download_gaia()
|
||||
download_sao_xmatch()
|
|
@ -1,372 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# gaia-stardb: Processing Gaia DR2 for celestia.Sci/Celestia
|
||||
# Copyright (C) 2019–2020 Andrew Tribick
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
"""Makes the star database."""
|
||||
|
||||
import contextlib
|
||||
import gzip
|
||||
import os
|
||||
import struct
|
||||
|
||||
from zipfile import ZipFile, ZIP_DEFLATED
|
||||
|
||||
import numpy as np
|
||||
import astropy.units as u
|
||||
|
||||
from astropy.table import MaskedColumn, Table, join, unique, vstack
|
||||
|
||||
from parse_hip import process_hip
|
||||
from parse_tyc import process_tyc
|
||||
from parse_utils import WorkaroundCDSReader, open_cds_tarfile
|
||||
from spparse import CEL_UNKNOWN_STAR, parse_spectrum
|
||||
|
||||
VERSION = "1.0.4"
|
||||
|
||||
# remove the following objects from the output
|
||||
|
||||
EXCLUSIONS = [
|
||||
60936, # quasar 3C 273
|
||||
114110, # non-existent star (see HIP1 errata)
|
||||
114176, # non-existent star (see HIP1 errata)
|
||||
]
|
||||
|
||||
# temperatures from star.cpp, spectral types O3-M9
|
||||
|
||||
TEFF_SPEC = np.array([
|
||||
52500, 48000, 44500, 41000, 38000, 35800, 33000,
|
||||
30000, 25400, 22000, 18700, 17000, 15400, 14000, 13000, 11900, 10500,
|
||||
9520, 9230, 8970, 8720, 8460, 8200, 8020, 7850, 7580, 7390,
|
||||
7200, 7050, 6890, 6740, 6590, 6440, 6360, 6280, 6200, 6110,
|
||||
6030, 5940, 5860, 5830, 5800, 5770, 5700, 5630, 5570, 5410,
|
||||
5250, 5080, 4900, 4730, 4590, 4350, 4200, 4060, 3990, 3920,
|
||||
3850, 3720, 3580, 3470, 3370, 3240, 3050, 2940, 2640, 2000])
|
||||
|
||||
TEFF_BINS = (TEFF_SPEC[:-1] + TEFF_SPEC[1:]) // 2
|
||||
|
||||
parse_spectrum_vec = np.vectorize(parse_spectrum, otypes=[np.uint16]) # pylint: disable=invalid-name
|
||||
|
||||
CEL_SPECS = parse_spectrum_vec(['OBAFGKM'[i//10]+str(i%10) for i in range(3, 70)])
|
||||
|
||||
def load_ubvri() -> Table:
|
||||
"""Load UBVRI Teff calibration from VizieR archive."""
|
||||
print('Loading UBVRI calibration')
|
||||
with open_cds_tarfile(os.path.join('vizier', 'ubvriteff.tar.gz')) as tf:
|
||||
return tf.read_gzip('table3.dat', ['V-K', 'B-V', 'V-I', 'J-K', 'H-K', 'Teff'])
|
||||
|
||||
def parse_spectra(data: Table) -> Table:
|
||||
"""Parse the spectral types into the celestia.Sci format."""
|
||||
print('Parsing spectral types')
|
||||
data['SpType'] = data['SpType'].filled('')
|
||||
sptypes = unique(data['SpType',])
|
||||
sptypes['CelSpec'] = parse_spectrum_vec(sptypes['SpType'])
|
||||
return join(data, sptypes)
|
||||
|
||||
def estimate_magnitudes(data: Table) -> None:
|
||||
"""Estimates magnitudes and color indices from G magnitude and BP-RP.
|
||||
|
||||
Formula used is from Evans et al. (2018) "Gaia Data Release 2: Photometric
|
||||
content and validation".
|
||||
"""
|
||||
print("Computing missing magnitudes and color indices")
|
||||
|
||||
bp_rp = data['bp_rp'].filled(0)
|
||||
bp_rp2 = bp_rp**2
|
||||
|
||||
data['Vmag'] = MaskedColumn(
|
||||
data['Vmag'].filled(
|
||||
data['phot_g_mean_mag'].filled(np.nan) + 0.01760 + bp_rp*0.006860 + bp_rp2*0.1732))
|
||||
data['e_Vmag'] = MaskedColumn(data['e_Vmag'].filled(0.045858))
|
||||
data['Vmag'].mask = np.isnan(data['Vmag'])
|
||||
data['e_Vmag'].mask = data['Vmag'].mask
|
||||
|
||||
bp_rp = data['bp_rp'].filled(np.nan)
|
||||
bp_rp2 = bp_rp**2
|
||||
|
||||
imag = data['phot_g_mean_mag'].filled(np.nan) - 0.02085 - bp_rp*0.7419 + bp_rp2*0.09631
|
||||
e_imag = np.where(np.isnan(imag), np.nan, 0.04956)
|
||||
|
||||
f_bmag = data['Bmag'].filled(np.nan)
|
||||
f_vmag = data['Vmag'].filled(np.nan)
|
||||
f_jmag = data['Jmag'].filled(np.nan)
|
||||
f_hmag = data['Hmag'].filled(np.nan)
|
||||
f_kmag = data['Kmag'].filled(np.nan)
|
||||
f_e_bmag = data['e_Bmag'].filled(np.nan)
|
||||
f_e_vmag = data['e_Vmag'].filled(np.nan)
|
||||
f_e_jmag = data['e_Jmag'].filled(np.nan)
|
||||
f_e_hmag = data['e_Hmag'].filled(np.nan)
|
||||
f_e_kmag = data['e_Kmag'].filled(np.nan)
|
||||
|
||||
data['B-V'] = MaskedColumn(data['B-V'].filled(f_bmag - f_vmag))
|
||||
data['e_B-V'] = MaskedColumn(data['e_B-V'].filled(np.sqrt(f_e_bmag**2 + f_e_vmag**2)))
|
||||
data['V-I'] = MaskedColumn(data['V-I'].filled(f_vmag - imag))
|
||||
data['e_V-I'] = MaskedColumn(data['e_V-I'].filled(np.sqrt(f_e_vmag**2 + e_imag**2)))
|
||||
data['V-K'] = MaskedColumn(f_vmag - f_kmag)
|
||||
data['e_V-K'] = MaskedColumn(np.sqrt(f_e_vmag**2 + f_e_kmag**2))
|
||||
data['J-K'] = MaskedColumn(f_jmag - f_kmag)
|
||||
data['e_J-K'] = MaskedColumn(np.sqrt(f_e_jmag**2 + f_e_kmag**2))
|
||||
data['H-K'] = MaskedColumn(f_hmag - f_kmag)
|
||||
data['e_H-K'] = MaskedColumn(np.sqrt(f_e_hmag**2 + f_e_kmag**2))
|
||||
|
||||
data['B-V'].mask = np.logical_or(data['B-V'].mask, np.isnan(data['B-V']))
|
||||
data['e_B-V'].mask = np.logical_or(data['e_B-V'].mask, np.isnan(data['e_B-V']))
|
||||
data['V-I'].mask = np.logical_or(data['V-I'].mask, np.isnan(data['V-I']))
|
||||
data['e_V-I'].mask = np.logical_or(data['e_V-I'].mask, np.isnan(data['e_V-I']))
|
||||
data['V-K'].mask = np.isnan(data['V-K'])
|
||||
data['e_V-K'].mask = np.isnan(data['e_V-K'])
|
||||
data['J-K'].mask = np.isnan(data['J-K'])
|
||||
data['e_J-K'].mask = np.isnan(data['e_J-K'])
|
||||
data['H-K'].mask = np.isnan(data['H-K'])
|
||||
data['e_H-K'].mask = np.isnan(data['e_H-K'])
|
||||
|
||||
data.remove_columns(['Bmag', 'e_Bmag', 'e_Vmag', 'Jmag', 'e_Jmag', 'Hmag', 'e_Hmag',
|
||||
'Kmag', 'e_Kmag'])
|
||||
|
||||
def estimate_temperatures(data: Table) -> None:
|
||||
"""Estimate the temperature of stars."""
|
||||
ubvri_data = load_ubvri()
|
||||
print('Estimating temperatures from color indices')
|
||||
|
||||
indices = Table(
|
||||
[
|
||||
data['B-V'].filled(np.nan),
|
||||
data['V-I'].filled(np.nan),
|
||||
data['V-K'].filled(np.nan),
|
||||
data['J-K'].filled(np.nan),
|
||||
data['H-K'].filled(np.nan),
|
||||
np.maximum(data['e_B-V'].filled(np.nan), 0.001),
|
||||
np.maximum(data['e_V-I'].filled(np.nan), 0.001),
|
||||
np.maximum(data['e_V-K'].filled(np.nan), 0.001),
|
||||
np.maximum(data['e_J-K'].filled(np.nan), 0.001),
|
||||
np.maximum(data['e_H-K'].filled(np.nan), 0.001),
|
||||
],
|
||||
names=['B-V','V-I','V-K','J-K','H-K',
|
||||
'e_B-V','e_V-I','e_V-K','e_J-K','e_H-K'])
|
||||
|
||||
weights = np.full_like(data['HIP'], 0, dtype=np.float64)
|
||||
teffs = np.full_like(data['HIP'], 0, dtype=np.float64)
|
||||
for row in ubvri_data:
|
||||
sumsq = np.maximum(
|
||||
np.nan_to_num(((indices['B-V']-row['B-V'])/indices['e_B-V'])**2)
|
||||
+ np.nan_to_num(((indices['V-K']-row['V-K'])/indices['e_V-K'])**2)
|
||||
+ np.nan_to_num(((indices['J-K']-row['J-K'])/indices['e_J-K'])**2)
|
||||
+ np.nan_to_num(((indices['V-I']-row['V-I'])/indices['e_V-I'])**2)
|
||||
+ np.nan_to_num(((indices['H-K']-row['H-K'])/indices['e_H-K'])**2), 0.001)
|
||||
teffs += row['Teff'] / sumsq
|
||||
weights += 1.0 / sumsq
|
||||
|
||||
data['teff_est'] = teffs / weights
|
||||
data['teff_est'].unit = u.K
|
||||
|
||||
def estimate_spectra(data: Table) -> Table:
|
||||
"""Estimate the spectral type of stars."""
|
||||
no_teff = data[data['teff_val'].mask]
|
||||
# temporarily disable no-member error in pylint, as it cannot see the reduce method
|
||||
# pylint: disable=no-member
|
||||
has_indices = np.logical_and.reduce((no_teff['B-V'].mask,
|
||||
no_teff['V-I'].mask,
|
||||
no_teff['V-K'].mask,
|
||||
no_teff['J-K'].mask,
|
||||
no_teff['H-K'].mask))
|
||||
# pylint: enable=no-member
|
||||
no_teff = no_teff[np.logical_not(has_indices)]
|
||||
estimate_temperatures(no_teff)
|
||||
data = join(data,
|
||||
no_teff['HIP', 'teff_est'],
|
||||
keys=['HIP'],
|
||||
join_type='left')
|
||||
data['teff_val'] = data['teff_val'].filled(data['teff_est'].filled(np.nan))
|
||||
data = data[np.logical_not(np.isnan(data['teff_val']))]
|
||||
data['CelSpec'] = CEL_SPECS[np.digitize(data['teff_val'], TEFF_BINS)]
|
||||
return data
|
||||
|
||||
def load_sao() -> Table:
|
||||
"""Loads the SAO catalog."""
|
||||
print("Loading SAO")
|
||||
|
||||
with open(os.path.join('vizier', 'sao.readme'), 'r') as readme:
|
||||
reader = WorkaroundCDSReader('sao.dat', ['SAO', 'HD'], [np.int64, np.int64], readme)
|
||||
|
||||
with gzip.open(os.path.join('vizier', 'sao.dat.gz'), 'rt', encoding='ascii') as f:
|
||||
data = reader.read(f)
|
||||
|
||||
data = unique(data.group_by('SAO'), keys=['HD'])
|
||||
data = unique(data.group_by('HD'), keys=['SAO'])
|
||||
return data
|
||||
|
||||
def merge_all() -> Table:
|
||||
"""Merges the HIP and TYC data."""
|
||||
hip_data = process_hip()
|
||||
|
||||
# extract the non-Gaia sources to make the merging easier
|
||||
non_gaia = hip_data[hip_data['source_id'].mask]
|
||||
|
||||
# merge object data for objects in both catalogues
|
||||
hip_data = join(hip_data[np.logical_not(hip_data['source_id'].mask)],
|
||||
process_tyc(),
|
||||
keys=['source_id'],
|
||||
table_names=['hip', 'tyc'],
|
||||
join_type='outer')
|
||||
|
||||
# Mask blank spectral type and component identifiers
|
||||
for str_col in (c for c in hip_data.colnames if hip_data[c].dtype.kind == 'U'):
|
||||
hip_data[str_col].mask = np.logical_or(hip_data[str_col].mask, hip_data[str_col] == '')
|
||||
|
||||
prefer_tyc = {'HD', 'SAO', 'Comp'}
|
||||
|
||||
for base_col in (c[:-4] for c in hip_data.colnames if c.endswith('_hip')):
|
||||
hip_col = base_col + '_hip'
|
||||
tyc_col = base_col + '_tyc'
|
||||
hip_data.rename_column(hip_col, base_col)
|
||||
if isinstance(hip_data[base_col], MaskedColumn):
|
||||
mask = np.logical_and(hip_data[base_col].mask, hip_data[tyc_col].mask)
|
||||
if base_col in prefer_tyc:
|
||||
base_data = hip_data[tyc_col].filled(hip_data[base_col])
|
||||
else:
|
||||
base_data = hip_data[base_col].filled(hip_data[tyc_col])
|
||||
hip_data[base_col] = MaskedColumn(base_data, mask=mask)
|
||||
hip_data.remove_column(tyc_col)
|
||||
|
||||
hip_data['HIP'] = hip_data['HIP'].filled(hip_data['TYC'])
|
||||
hip_data.remove_columns('TYC')
|
||||
|
||||
# Add the non-Gaia stars back into the dataset
|
||||
hip_data = vstack([hip_data, non_gaia], join_type='outer', metadata_conflicts='silent')
|
||||
|
||||
# Merge SAO, preferring the values from the SAO catalogue
|
||||
sao = load_sao()
|
||||
sao = sao[np.isin(sao['HD'], hip_data[np.logical_not(hip_data['HD'].mask)]['HD'])]
|
||||
hip_data['SAO'].mask = np.logical_or(hip_data['SAO'].mask,
|
||||
np.isin(hip_data['SAO'], sao['SAO']))
|
||||
|
||||
hd_sao = join(hip_data[np.logical_not(hip_data['HD'].mask)],
|
||||
sao,
|
||||
keys=['HD'],
|
||||
table_names=['xref', 'sao'],
|
||||
join_type='left')
|
||||
hd_sao.rename_column('SAO_xref', 'SAO')
|
||||
hd_sao['SAO'] = MaskedColumn(hd_sao['SAO_sao'].filled(hd_sao['SAO']),
|
||||
mask=np.logical_and(hd_sao['SAO'].mask,
|
||||
hd_sao['SAO_sao'].mask))
|
||||
hd_sao.remove_column('SAO_sao')
|
||||
|
||||
return vstack([hip_data[hip_data['HD'].mask], hd_sao], join_type='exact')
|
||||
|
||||
OBLIQUITY = np.radians(23.4392911)
|
||||
COS_OBLIQUITY = np.cos(OBLIQUITY)
|
||||
SIN_OBLIQUITY = np.sin(OBLIQUITY)
|
||||
ROT_MATRIX = np.array([[1, 0, 0],
|
||||
[0, COS_OBLIQUITY, SIN_OBLIQUITY],
|
||||
[0, -SIN_OBLIQUITY, COS_OBLIQUITY]])
|
||||
|
||||
def process_data() -> Table:
|
||||
"""Processes the missing data values."""
|
||||
data = merge_all()
|
||||
data = data[np.logical_not(data['dist_use'].mask)]
|
||||
data = data[np.isin(data['HIP'], EXCLUSIONS, invert=True)]
|
||||
estimate_magnitudes(data)
|
||||
data = parse_spectra(data)
|
||||
unknown_spectra = data[data['CelSpec'] == CEL_UNKNOWN_STAR]['HIP', 'teff_val', 'B-V', 'e_B-V',
|
||||
'V-I', 'e_V-I', 'V-K', 'e_V-K',
|
||||
'J-K', 'e_J-K', 'H-K', 'e_H-K']
|
||||
unknown_spectra = estimate_spectra(unknown_spectra)
|
||||
data = join(data,
|
||||
unknown_spectra['HIP', 'CelSpec'],
|
||||
keys=['HIP'],
|
||||
join_type='left',
|
||||
table_names=['data', 'est'])
|
||||
data['CelSpec'] = np.where(data['CelSpec_data'] == CEL_UNKNOWN_STAR,
|
||||
data['CelSpec_est'].filled(CEL_UNKNOWN_STAR),
|
||||
data['CelSpec_data'])
|
||||
data.remove_columns(['phot_g_mean_mag', 'bp_rp', 'teff_val', 'SpType', 'B-V', 'e_B-V', 'V-I',
|
||||
'e_V-I', 'V-K', 'e_V-K', 'J-K', 'e_J-K', 'H-K', 'e_H-K', 'CelSpec_est',
|
||||
'CelSpec_data'])
|
||||
|
||||
data['Vmag_abs'] = data['Vmag'] - 5*(np.log10(data['dist_use'])-1)
|
||||
|
||||
print('Converting coordinates to ecliptic frame')
|
||||
|
||||
data['ra'].convert_unit_to(u.rad)
|
||||
data['dec'].convert_unit_to(u.rad)
|
||||
data['dist_use'].convert_unit_to(u.lyr)
|
||||
|
||||
coords = np.matmul(ROT_MATRIX,
|
||||
np.array([data['dist_use']*np.cos(data['ra'])*np.cos(data['dec']),
|
||||
data['dist_use']*np.sin(data['dec']),
|
||||
-data['dist_use']*np.sin(data['ra'])*np.cos(data['dec'])]))
|
||||
data['x'] = coords[0]
|
||||
data['y'] = coords[1]
|
||||
data['z'] = coords[2]
|
||||
|
||||
data['x'].unit = u.lyr
|
||||
data['y'].unit = u.lyr
|
||||
data['z'].unit = u.lyr
|
||||
|
||||
return data
|
||||
|
||||
def write_starsdat(data: Table, outfile: str) -> None:
|
||||
"""Write the stars.dat file."""
|
||||
print('Writing stars.dat')
|
||||
with open(outfile, 'wb') as f:
|
||||
f.write(struct.pack('<8sHL', b'CELSTARS', 0x0100, len(data)))
|
||||
print(f' Writing {len(data)} records')
|
||||
fmt = struct.Struct('<L3fhH')
|
||||
for hip, x, y, z, vmag_abs, celspec in zip(data['HIP'], data['x'], data['y'], data['z'],
|
||||
data['Vmag_abs'], data['CelSpec']):
|
||||
f.write(fmt.pack(hip, x, y, z, int(round(vmag_abs*256)), celspec))
|
||||
|
||||
def write_xindex(data: Table, field: str, outfile: str) -> None:
|
||||
"""Write a cross-index file."""
|
||||
print('Writing '+field+' cross-index')
|
||||
print(' Extracting cross-index data')
|
||||
data = data[np.logical_not(data[field].mask)]['HIP', 'Comp', field]
|
||||
data['Comp'] = data['Comp'].filled('')
|
||||
data = unique(data.group_by([field, 'Comp', 'HIP']), keys=[field])
|
||||
print(f' Writing {len(data)} records')
|
||||
with open(outfile, 'wb') as f:
|
||||
f.write(struct.pack('<8sH', b'CELINDEX', 0x0100))
|
||||
fmt = struct.Struct('<2L')
|
||||
for hip, cat in zip(data['HIP'], data[field]):
|
||||
f.write(fmt.pack(cat, hip))
|
||||
|
||||
def make_stardb() -> None:
|
||||
"""Make the Celestia star database files."""
|
||||
data = process_data()
|
||||
|
||||
with contextlib.suppress(FileExistsError):
|
||||
os.mkdir('output')
|
||||
|
||||
write_starsdat(data, os.path.join('output', 'stars.dat'))
|
||||
|
||||
xindices = [
|
||||
('HD', 'hdxindex.dat'),
|
||||
('SAO', 'saoxindex.dat')
|
||||
]
|
||||
|
||||
for fieldname, outfile in xindices:
|
||||
write_xindex(data, fieldname, os.path.join('output', outfile))
|
||||
|
||||
print("Creating archive")
|
||||
archivename = f'celestia-gaia-stardb-{VERSION}'
|
||||
with ZipFile(f'{archivename}.zip', 'w', compression=ZIP_DEFLATED, compresslevel=9) as zf:
|
||||
contents = ['stars.dat', 'hdxindex.dat', 'saoxindex.dat', 'LICENSE.txt', 'CREDITS.md']
|
||||
for f in contents:
|
||||
zf.write(os.path.join('output', f), arcname=os.path.join(archivename, f))
|
||||
|
||||
if __name__ == '__main__':
|
||||
make_stardb()
|
|
@ -1,149 +0,0 @@
|
|||
# Credits
|
||||
|
||||
These files were generated with the celestia-gaia-stardb application available
|
||||
at https://github.com/ajtribick/celestia-gaia-stardb.
|
||||
|
||||
## References
|
||||
|
||||
### Source catalogues
|
||||
|
||||
- *Gaia* Data Release 2 (https://gea.esac.esa.int/archive/)
|
||||
- *Gaia* Collaboration et al. (2016), A&A 595, id.A1, "The *Gaia* mission"
|
||||
- *Gaia* Collaboration et al. (2018), A&A 616, id.A1, "*Gaia* Data
|
||||
Release 2. Summary of the contents and survey properties"
|
||||
- Andrae et al. (2018), A&A 616, id.A8, "*Gaia* Data Release 2. First
|
||||
stellar parameters from Apsis"
|
||||
- Marrese et al. (2018), A&A 621, id.A144, "*Gaia* Data Release 2.
|
||||
Cross-match with external catalogues: algorithms and results"
|
||||
|
||||
- *Gaia* Data Release 2 Geometric Distances
|
||||
(http://www.mpia.de/~calj/gdr2_distances/main.html)
|
||||
|
||||
Bailer-Jones et al. (2018), AJ 156(2), id.58 "Estimating Distance from
|
||||
Parallaxes. IV. Distances to 1.33 Billion Stars in *Gaia* Data Release 2"
|
||||
|
||||
- Binarity of Hipparcos stars from Gaia pm anomaly
|
||||
(https://cdsarc.unistra.fr/viz-bin/cat/J/A%2bA/623/A72)
|
||||
|
||||
Kervella et al. (2019), A&A 623, id.A72 "Stellar and substellar companions
|
||||
of nearby stars from Gaia DR2. Binarity from proper motion anomaly"
|
||||
|
||||
- Extended Hipparcos Compilation (XHIP)
|
||||
(http://cdsarc.u-strasbg.fr/viz-bin/cat/V/137D)
|
||||
|
||||
Anderson & Francis (2012), AstL 38(5), pp.331–346 "XHIP: An extended
|
||||
Hipparcos compilation"
|
||||
|
||||
- ASCC-2.5, 3rd version (http://cdsarc.u-strasbg.fr/viz-bin/cat/I/280B)
|
||||
|
||||
Kharchenko (2001), Kinematika i Fizika Nebesnykh Tel 17(5), pp.409-423
|
||||
"All-sky compiled catalogue of 2.5 million stars"
|
||||
|
||||
- HD identifications for Tycho-2 stars
|
||||
(https://cdsarc.unistra.fr/viz-bin/cat/IV/25)
|
||||
|
||||
Fabricius et al. (2002), A&A 386, pp.709–710 "Henry Draper catalogue
|
||||
identifications for Tycho-2 stars"
|
||||
|
||||
- Teff and metallicities for Tycho-2 stars
|
||||
(http://cdsarc.u-strasbg.fr/viz-bin/cat/V/136)
|
||||
|
||||
Ammons et al. (2006), ApJ 638(2), pp.1004–1017 "The N2K Consortium. IV. New
|
||||
Temperatures and Metallicities for More than 100,000 FGK Dwarfs"
|
||||
|
||||
- The Tycho-2 Spectral Type Catalog
|
||||
(http://cdsarc.u-strasbg.fr/viz-bin/cat/III/231)
|
||||
|
||||
Wright et al. (2003), AJ 125(1), pp.359–363 "The Tycho-2 Spectral Type
|
||||
Catalog"
|
||||
|
||||
- New spectral types for Tycho2 stars
|
||||
(https://cdsarc.unistra.fr/viz-bin/cat/J/PAZh/34/21)
|
||||
|
||||
Tsvetkov et al. (2008), Astronomy Letters 34(1), pp.17–27 "Inaccuracies in
|
||||
the spectral classification of stars from the Tycho-2 Spectral Type
|
||||
Catalogue"
|
||||
|
||||
- SAO Star Catalog J2000 (http://cdsarc.u-strasbg.fr/viz-bin/cat/I/131A)
|
||||
|
||||
SAO Staff, "Smithsonian Astrophysical Observatory Star Catalog (1990)"
|
||||
|
||||
### Additional catalogues of interest
|
||||
|
||||
- The Hipparcos and Tycho Catalogues (1997)
|
||||
(http://cdsarc.u-strasbg.fr/viz-bin/cat/I/239)
|
||||
- Perryman et al. (1997), A&A 500 pp.501–504 "The Hipparcos Catalogue"
|
||||
- Høg et al. (1997), A&A 323 pp.L57–L60 "The TYCHO Catalogue"
|
||||
|
||||
- Hipparcos, the New Reduction (http://cdsarc.u-strasbg.fr/viz-bin/cat/I/311)
|
||||
|
||||
van Leeuwen (2007), A&A 474(2) pp.653–664 "Validation of the new Hipparcos
|
||||
reduction"
|
||||
|
||||
- The Tycho-2 Catalogue (http://cdsarc.u-strasbg.fr/viz-bin/cat/I/259)
|
||||
|
||||
Høg et al. (2000), A&A 355 pp.L27–L30 "The Tycho-2 catalogue of the 2.5
|
||||
million brightest stars"
|
||||
|
||||
- Henry Draper Catalogue and Extension
|
||||
(http://cdsarc.u-strasbg.fr/viz-bin/cat/III/135A)
|
||||
|
||||
Cannon & Pickering (1918–1924), Annals of the Astronomical Observatory of
|
||||
Harvard College
|
||||
|
||||
### Data processing
|
||||
|
||||
- UBVRIJHK color-temperature calibration
|
||||
(http://cdsarc.u-strasbg.fr/viz-bin/cat/J/ApJS/193/1)
|
||||
|
||||
Worthey & Lee (2011), ApJS, 193(1), id.1 "An Empirical UBV RI JHK
|
||||
Color-Temperature Calibration for Stars"
|
||||
|
||||
- Bailer-Jones (2015), PASP 127(956), pp.994 "Estimating Distances from
|
||||
Parallaxes"
|
||||
|
||||
- Astraatmadja & Bailer-Jones (2016), ApJ 833(1), id.119 "Estimating
|
||||
Distances from Parallaxes. III. Distances of Two Million Stars in the
|
||||
*Gaia* DR1 Catalogue"
|
||||
|
||||
- Oliphant (2006), USA: Trelgol Publishing "A guide to NumPy"
|
||||
|
||||
- van der Walt et al. (2011), Computing in Science & Engineering, 13, 22–30
|
||||
"The NumPy Array: A Structure for Efficient Numerical Computation"
|
||||
|
||||
- Harris et al. (2020), Nature 585, 357–362 "Array programming with NumPy"
|
||||
|
||||
- Astropy Collaboration et al. (2013), A&A 558, id.A33 "Astropy: A community
|
||||
Python package for astronomy"
|
||||
|
||||
- Astropy Collaboration et al. (2018), AJ 156(3), id.123 "The Astropy Project:
|
||||
Building an Open-science Project and Status of the v2.0 Core Package"
|
||||
|
||||
### Databases
|
||||
|
||||
- Wenger et al. (2000), A&A 143, 9–22 "The SIMBAD astronomical database. The
|
||||
CDS reference database for astronomical objects"
|
||||
|
||||
- Ochsenbein et al. (2000), A&AS 143, 23–32 "The VizieR database of
|
||||
astronomical catalogues"
|
||||
|
||||
## Acknowledgements
|
||||
|
||||
This work has made use of data from the European Space Agency (ESA) mission
|
||||
*Gaia* (https://www.cosmos.esa.int/gaia), processed by the *Gaia* Data
|
||||
Processing and Analysis Consortium (DPAC,
|
||||
https://www.cosmos.esa.int/web/gaia/dpac/consortium). Funding for the DPAC has
|
||||
been provided by national institutions, in particular the institutions
|
||||
participating in the *Gaia* Multilateral Agreement.
|
||||
|
||||
This work has made use of the SIMBAD database, operated at CDS, Strasbourg,
|
||||
France.
|
||||
|
||||
This work has made use of the VizieR catalogue access tool, CDS, Strasbourg,
|
||||
France (DOI : 10.26093/cds/vizier). The original description of the VizieR
|
||||
service was published in 2000, A&AS 143, 23.
|
||||
|
||||
This work made use of the cross-match service provided by CDS, Strasbourg.
|
||||
|
||||
This work made use of [Astropy](http://www.astropy.org), a community-developed
|
||||
core Python package for Astronomy.
|
|
@ -1,428 +0,0 @@
|
|||
Attribution-ShareAlike 4.0 International
|
||||
|
||||
=======================================================================
|
||||
|
||||
Creative Commons Corporation ("Creative Commons") is not a law firm and
|
||||
does not provide legal services or legal advice. Distribution of
|
||||
Creative Commons public licenses does not create a lawyer-client or
|
||||
other relationship. Creative Commons makes its licenses and related
|
||||
information available on an "as-is" basis. Creative Commons gives no
|
||||
warranties regarding its licenses, any material licensed under their
|
||||
terms and conditions, or any related information. Creative Commons
|
||||
disclaims all liability for damages resulting from their use to the
|
||||
fullest extent possible.
|
||||
|
||||
Using Creative Commons Public Licenses
|
||||
|
||||
Creative Commons public licenses provide a standard set of terms and
|
||||
conditions that creators and other rights holders may use to share
|
||||
original works of authorship and other material subject to copyright
|
||||
and certain other rights specified in the public license below. The
|
||||
following considerations are for informational purposes only, are not
|
||||
exhaustive, and do not form part of our licenses.
|
||||
|
||||
Considerations for licensors: Our public licenses are
|
||||
intended for use by those authorized to give the public
|
||||
permission to use material in ways otherwise restricted by
|
||||
copyright and certain other rights. Our licenses are
|
||||
irrevocable. Licensors should read and understand the terms
|
||||
and conditions of the license they choose before applying it.
|
||||
Licensors should also secure all rights necessary before
|
||||
applying our licenses so that the public can reuse the
|
||||
material as expected. Licensors should clearly mark any
|
||||
material not subject to the license. This includes other CC-
|
||||
licensed material, or material used under an exception or
|
||||
limitation to copyright. More considerations for licensors:
|
||||
wiki.creativecommons.org/Considerations_for_licensors
|
||||
|
||||
Considerations for the public: By using one of our public
|
||||
licenses, a licensor grants the public permission to use the
|
||||
licensed material under specified terms and conditions. If
|
||||
the licensor's permission is not necessary for any reason--for
|
||||
example, because of any applicable exception or limitation to
|
||||
copyright--then that use is not regulated by the license. Our
|
||||
licenses grant only permissions under copyright and certain
|
||||
other rights that a licensor has authority to grant. Use of
|
||||
the licensed material may still be restricted for other
|
||||
reasons, including because others have copyright or other
|
||||
rights in the material. A licensor may make special requests,
|
||||
such as asking that all changes be marked or described.
|
||||
Although not required by our licenses, you are encouraged to
|
||||
respect those requests where reasonable. More considerations
|
||||
for the public:
|
||||
wiki.creativecommons.org/Considerations_for_licensees
|
||||
|
||||
=======================================================================
|
||||
|
||||
Creative Commons Attribution-ShareAlike 4.0 International Public
|
||||
License
|
||||
|
||||
By exercising the Licensed Rights (defined below), You accept and agree
|
||||
to be bound by the terms and conditions of this Creative Commons
|
||||
Attribution-ShareAlike 4.0 International Public License ("Public
|
||||
License"). To the extent this Public License may be interpreted as a
|
||||
contract, You are granted the Licensed Rights in consideration of Your
|
||||
acceptance of these terms and conditions, and the Licensor grants You
|
||||
such rights in consideration of benefits the Licensor receives from
|
||||
making the Licensed Material available under these terms and
|
||||
conditions.
|
||||
|
||||
|
||||
Section 1 -- Definitions.
|
||||
|
||||
a. Adapted Material means material subject to Copyright and Similar
|
||||
Rights that is derived from or based upon the Licensed Material
|
||||
and in which the Licensed Material is translated, altered,
|
||||
arranged, transformed, or otherwise modified in a manner requiring
|
||||
permission under the Copyright and Similar Rights held by the
|
||||
Licensor. For purposes of this Public License, where the Licensed
|
||||
Material is a musical work, performance, or sound recording,
|
||||
Adapted Material is always produced where the Licensed Material is
|
||||
synched in timed relation with a moving image.
|
||||
|
||||
b. Adapter's License means the license You apply to Your Copyright
|
||||
and Similar Rights in Your contributions to Adapted Material in
|
||||
accordance with the terms and conditions of this Public License.
|
||||
|
||||
c. BY-SA Compatible License means a license listed at
|
||||
creativecommons.org/compatiblelicenses, approved by Creative
|
||||
Commons as essentially the equivalent of this Public License.
|
||||
|
||||
d. Copyright and Similar Rights means copyright and/or similar rights
|
||||
closely related to copyright including, without limitation,
|
||||
performance, broadcast, sound recording, and Sui Generis Database
|
||||
Rights, without regard to how the rights are labeled or
|
||||
categorized. For purposes of this Public License, the rights
|
||||
specified in Section 2(b)(1)-(2) are not Copyright and Similar
|
||||
Rights.
|
||||
|
||||
e. Effective Technological Measures means those measures that, in the
|
||||
absence of proper authority, may not be circumvented under laws
|
||||
fulfilling obligations under Article 11 of the WIPO Copyright
|
||||
Treaty adopted on December 20, 1996, and/or similar international
|
||||
agreements.
|
||||
|
||||
f. Exceptions and Limitations means fair use, fair dealing, and/or
|
||||
any other exception or limitation to Copyright and Similar Rights
|
||||
that applies to Your use of the Licensed Material.
|
||||
|
||||
g. License Elements means the license attributes listed in the name
|
||||
of a Creative Commons Public License. The License Elements of this
|
||||
Public License are Attribution and ShareAlike.
|
||||
|
||||
h. Licensed Material means the artistic or literary work, database,
|
||||
or other material to which the Licensor applied this Public
|
||||
License.
|
||||
|
||||
i. Licensed Rights means the rights granted to You subject to the
|
||||
terms and conditions of this Public License, which are limited to
|
||||
all Copyright and Similar Rights that apply to Your use of the
|
||||
Licensed Material and that the Licensor has authority to license.
|
||||
|
||||
j. Licensor means the individual(s) or entity(ies) granting rights
|
||||
under this Public License.
|
||||
|
||||
k. Share means to provide material to the public by any means or
|
||||
process that requires permission under the Licensed Rights, such
|
||||
as reproduction, public display, public performance, distribution,
|
||||
dissemination, communication, or importation, and to make material
|
||||
available to the public including in ways that members of the
|
||||
public may access the material from a place and at a time
|
||||
individually chosen by them.
|
||||
|
||||
l. Sui Generis Database Rights means rights other than copyright
|
||||
resulting from Directive 96/9/EC of the European Parliament and of
|
||||
the Council of 11 March 1996 on the legal protection of databases,
|
||||
as amended and/or succeeded, as well as other essentially
|
||||
equivalent rights anywhere in the world.
|
||||
|
||||
m. You means the individual or entity exercising the Licensed Rights
|
||||
under this Public License. Your has a corresponding meaning.
|
||||
|
||||
|
||||
Section 2 -- Scope.
|
||||
|
||||
a. License grant.
|
||||
|
||||
1. Subject to the terms and conditions of this Public License,
|
||||
the Licensor hereby grants You a worldwide, royalty-free,
|
||||
non-sublicensable, non-exclusive, irrevocable license to
|
||||
exercise the Licensed Rights in the Licensed Material to:
|
||||
|
||||
a. reproduce and Share the Licensed Material, in whole or
|
||||
in part; and
|
||||
|
||||
b. produce, reproduce, and Share Adapted Material.
|
||||
|
||||
2. Exceptions and Limitations. For the avoidance of doubt, where
|
||||
Exceptions and Limitations apply to Your use, this Public
|
||||
License does not apply, and You do not need to comply with
|
||||
its terms and conditions.
|
||||
|
||||
3. Term. The term of this Public License is specified in Section
|
||||
6(a).
|
||||
|
||||
4. Media and formats; technical modifications allowed. The
|
||||
Licensor authorizes You to exercise the Licensed Rights in
|
||||
all media and formats whether now known or hereafter created,
|
||||
and to make technical modifications necessary to do so. The
|
||||
Licensor waives and/or agrees not to assert any right or
|
||||
authority to forbid You from making technical modifications
|
||||
necessary to exercise the Licensed Rights, including
|
||||
technical modifications necessary to circumvent Effective
|
||||
Technological Measures. For purposes of this Public License,
|
||||
simply making modifications authorized by this Section 2(a)
|
||||
(4) never produces Adapted Material.
|
||||
|
||||
5. Downstream recipients.
|
||||
|
||||
a. Offer from the Licensor -- Licensed Material. Every
|
||||
recipient of the Licensed Material automatically
|
||||
receives an offer from the Licensor to exercise the
|
||||
Licensed Rights under the terms and conditions of this
|
||||
Public License.
|
||||
|
||||
b. Additional offer from the Licensor -- Adapted Material.
|
||||
Every recipient of Adapted Material from You
|
||||
automatically receives an offer from the Licensor to
|
||||
exercise the Licensed Rights in the Adapted Material
|
||||
under the conditions of the Adapter's License You apply.
|
||||
|
||||
c. No downstream restrictions. You may not offer or impose
|
||||
any additional or different terms or conditions on, or
|
||||
apply any Effective Technological Measures to, the
|
||||
Licensed Material if doing so restricts exercise of the
|
||||
Licensed Rights by any recipient of the Licensed
|
||||
Material.
|
||||
|
||||
6. No endorsement. Nothing in this Public License constitutes or
|
||||
may be construed as permission to assert or imply that You
|
||||
are, or that Your use of the Licensed Material is, connected
|
||||
with, or sponsored, endorsed, or granted official status by,
|
||||
the Licensor or others designated to receive attribution as
|
||||
provided in Section 3(a)(1)(A)(i).
|
||||
|
||||
b. Other rights.
|
||||
|
||||
1. Moral rights, such as the right of integrity, are not
|
||||
licensed under this Public License, nor are publicity,
|
||||
privacy, and/or other similar personality rights; however, to
|
||||
the extent possible, the Licensor waives and/or agrees not to
|
||||
assert any such rights held by the Licensor to the limited
|
||||
extent necessary to allow You to exercise the Licensed
|
||||
Rights, but not otherwise.
|
||||
|
||||
2. Patent and trademark rights are not licensed under this
|
||||
Public License.
|
||||
|
||||
3. To the extent possible, the Licensor waives any right to
|
||||
collect royalties from You for the exercise of the Licensed
|
||||
Rights, whether directly or through a collecting society
|
||||
under any voluntary or waivable statutory or compulsory
|
||||
licensing scheme. In all other cases the Licensor expressly
|
||||
reserves any right to collect such royalties.
|
||||
|
||||
|
||||
Section 3 -- License Conditions.
|
||||
|
||||
Your exercise of the Licensed Rights is expressly made subject to the
|
||||
following conditions.
|
||||
|
||||
a. Attribution.
|
||||
|
||||
1. If You Share the Licensed Material (including in modified
|
||||
form), You must:
|
||||
|
||||
a. retain the following if it is supplied by the Licensor
|
||||
with the Licensed Material:
|
||||
|
||||
i. identification of the creator(s) of the Licensed
|
||||
Material and any others designated to receive
|
||||
attribution, in any reasonable manner requested by
|
||||
the Licensor (including by pseudonym if
|
||||
designated);
|
||||
|
||||
ii. a copyright notice;
|
||||
|
||||
iii. a notice that refers to this Public License;
|
||||
|
||||
iv. a notice that refers to the disclaimer of
|
||||
warranties;
|
||||
|
||||
v. a URI or hyperlink to the Licensed Material to the
|
||||
extent reasonably practicable;
|
||||
|
||||
b. indicate if You modified the Licensed Material and
|
||||
retain an indication of any previous modifications; and
|
||||
|
||||
c. indicate the Licensed Material is licensed under this
|
||||
Public License, and include the text of, or the URI or
|
||||
hyperlink to, this Public License.
|
||||
|
||||
2. You may satisfy the conditions in Section 3(a)(1) in any
|
||||
reasonable manner based on the medium, means, and context in
|
||||
which You Share the Licensed Material. For example, it may be
|
||||
reasonable to satisfy the conditions by providing a URI or
|
||||
hyperlink to a resource that includes the required
|
||||
information.
|
||||
|
||||
3. If requested by the Licensor, You must remove any of the
|
||||
information required by Section 3(a)(1)(A) to the extent
|
||||
reasonably practicable.
|
||||
|
||||
b. ShareAlike.
|
||||
|
||||
In addition to the conditions in Section 3(a), if You Share
|
||||
Adapted Material You produce, the following conditions also apply.
|
||||
|
||||
1. The Adapter's License You apply must be a Creative Commons
|
||||
license with the same License Elements, this version or
|
||||
later, or a BY-SA Compatible License.
|
||||
|
||||
2. You must include the text of, or the URI or hyperlink to, the
|
||||
Adapter's License You apply. You may satisfy this condition
|
||||
in any reasonable manner based on the medium, means, and
|
||||
context in which You Share Adapted Material.
|
||||
|
||||
3. You may not offer or impose any additional or different terms
|
||||
or conditions on, or apply any Effective Technological
|
||||
Measures to, Adapted Material that restrict exercise of the
|
||||
rights granted under the Adapter's License You apply.
|
||||
|
||||
|
||||
Section 4 -- Sui Generis Database Rights.
|
||||
|
||||
Where the Licensed Rights include Sui Generis Database Rights that
|
||||
apply to Your use of the Licensed Material:
|
||||
|
||||
a. for the avoidance of doubt, Section 2(a)(1) grants You the right
|
||||
to extract, reuse, reproduce, and Share all or a substantial
|
||||
portion of the contents of the database;
|
||||
|
||||
b. if You include all or a substantial portion of the database
|
||||
contents in a database in which You have Sui Generis Database
|
||||
Rights, then the database in which You have Sui Generis Database
|
||||
Rights (but not its individual contents) is Adapted Material,
|
||||
|
||||
including for purposes of Section 3(b); and
|
||||
c. You must comply with the conditions in Section 3(a) if You Share
|
||||
all or a substantial portion of the contents of the database.
|
||||
|
||||
For the avoidance of doubt, this Section 4 supplements and does not
|
||||
replace Your obligations under this Public License where the Licensed
|
||||
Rights include other Copyright and Similar Rights.
|
||||
|
||||
|
||||
Section 5 -- Disclaimer of Warranties and Limitation of Liability.
|
||||
|
||||
a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE
|
||||
EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS
|
||||
AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF
|
||||
ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS,
|
||||
IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION,
|
||||
WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR
|
||||
PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS,
|
||||
ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT
|
||||
KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT
|
||||
ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU.
|
||||
|
||||
b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE
|
||||
TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION,
|
||||
NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT,
|
||||
INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES,
|
||||
COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR
|
||||
USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN
|
||||
ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR
|
||||
DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR
|
||||
IN PART, THIS LIMITATION MAY NOT APPLY TO YOU.
|
||||
|
||||
c. The disclaimer of warranties and limitation of liability provided
|
||||
above shall be interpreted in a manner that, to the extent
|
||||
possible, most closely approximates an absolute disclaimer and
|
||||
waiver of all liability.
|
||||
|
||||
|
||||
Section 6 -- Term and Termination.
|
||||
|
||||
a. This Public License applies for the term of the Copyright and
|
||||
Similar Rights licensed here. However, if You fail to comply with
|
||||
this Public License, then Your rights under this Public License
|
||||
terminate automatically.
|
||||
|
||||
b. Where Your right to use the Licensed Material has terminated under
|
||||
Section 6(a), it reinstates:
|
||||
|
||||
1. automatically as of the date the violation is cured, provided
|
||||
it is cured within 30 days of Your discovery of the
|
||||
violation; or
|
||||
|
||||
2. upon express reinstatement by the Licensor.
|
||||
|
||||
For the avoidance of doubt, this Section 6(b) does not affect any
|
||||
right the Licensor may have to seek remedies for Your violations
|
||||
of this Public License.
|
||||
|
||||
c. For the avoidance of doubt, the Licensor may also offer the
|
||||
Licensed Material under separate terms or conditions or stop
|
||||
distributing the Licensed Material at any time; however, doing so
|
||||
will not terminate this Public License.
|
||||
|
||||
d. Sections 1, 5, 6, 7, and 8 survive termination of this Public
|
||||
License.
|
||||
|
||||
|
||||
Section 7 -- Other Terms and Conditions.
|
||||
|
||||
a. The Licensor shall not be bound by any additional or different
|
||||
terms or conditions communicated by You unless expressly agreed.
|
||||
|
||||
b. Any arrangements, understandings, or agreements regarding the
|
||||
Licensed Material not stated herein are separate from and
|
||||
independent of the terms and conditions of this Public License.
|
||||
|
||||
|
||||
Section 8 -- Interpretation.
|
||||
|
||||
a. For the avoidance of doubt, this Public License does not, and
|
||||
shall not be interpreted to, reduce, limit, restrict, or impose
|
||||
conditions on any use of the Licensed Material that could lawfully
|
||||
be made without permission under this Public License.
|
||||
|
||||
b. To the extent possible, if any provision of this Public License is
|
||||
deemed unenforceable, it shall be automatically reformed to the
|
||||
minimum extent necessary to make it enforceable. If the provision
|
||||
cannot be reformed, it shall be severed from this Public License
|
||||
without affecting the enforceability of the remaining terms and
|
||||
conditions.
|
||||
|
||||
c. No term or condition of this Public License will be waived and no
|
||||
failure to comply consented to unless expressly agreed to by the
|
||||
Licensor.
|
||||
|
||||
d. Nothing in this Public License constitutes or may be interpreted
|
||||
as a limitation upon, or waiver of, any privileges and immunities
|
||||
that apply to the Licensor or You, including from the legal
|
||||
processes of any jurisdiction or authority.
|
||||
|
||||
|
||||
=======================================================================
|
||||
|
||||
Creative Commons is not a party to its public
|
||||
licenses. Notwithstanding, Creative Commons may elect to apply one of
|
||||
its public licenses to material it publishes and in those instances
|
||||
will be considered the “Licensor.” The text of the Creative Commons
|
||||
public licenses is dedicated to the public domain under the CC0 Public
|
||||
Domain Dedication. Except for the limited purpose of indicating that
|
||||
material is shared under a Creative Commons public license or as
|
||||
otherwise permitted by the Creative Commons policies published at
|
||||
creativecommons.org/policies, Creative Commons does not authorize the
|
||||
use of the trademark "Creative Commons" or any other trademark or logo
|
||||
of Creative Commons without its prior written consent including,
|
||||
without limitation, in connection with any unauthorized modifications
|
||||
to any of its public licenses or any other arrangements,
|
||||
understandings, or agreements concerning use of licensed material. For
|
||||
the avoidance of doubt, this paragraph does not form part of the
|
||||
public licenses.
|
||||
|
||||
Creative Commons may be contacted at creativecommons.org.
|
||||
|
|
@ -1,192 +0,0 @@
|
|||
# gaia-stardb: Processing Gaia DR2 for celestia.Sci/Celestia
|
||||
# Copyright (C) 2019–2020 Andrew Tribick
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
"""Routines for parsing the HIP data."""
|
||||
|
||||
import os
|
||||
|
||||
import numpy as np
|
||||
import astropy.io.ascii as io_ascii
|
||||
import astropy.units as u
|
||||
|
||||
from astropy.coordinates import ICRS, SkyCoord
|
||||
from astropy.table import Table, join, unique
|
||||
from astropy.time import Time
|
||||
|
||||
from parse_utils import open_cds_tarfile, read_gaia
|
||||
|
||||
def load_gaia_hip() -> Table:
|
||||
"""Load the Gaia DR2 HIP sources."""
|
||||
print('Loading Gaia DR2 sources for HIP')
|
||||
|
||||
gaia = read_gaia(os.path.join('gaia', 'gaiadr2_hip-result.csv'),
|
||||
'hip_id',
|
||||
extra_fields=['parallax', 'parallax_error'])
|
||||
gaia.rename_column('hip_id', 'HIP')
|
||||
return gaia
|
||||
|
||||
def load_xhip() -> Table:
|
||||
"""Load the XHIP catalogue from the VizieR archive."""
|
||||
print('Loading XHIP')
|
||||
with open_cds_tarfile(os.path.join('vizier', 'xhip.tar.gz')) as tf:
|
||||
print(' Loading main catalog')
|
||||
hip_data = tf.read_gzip(
|
||||
'main.dat',
|
||||
['HIP', 'Comp', 'RAdeg', 'DEdeg', 'Plx', 'pmRA', 'pmDE',
|
||||
'e_Plx', 'Dist', 'e_Dist', 'SpType', 'RV'],
|
||||
fill_values=[('', '-1', 'Tc', 'Lc'), ('', 'NaN', 'phi')])
|
||||
hip_data.add_index('HIP')
|
||||
|
||||
print(' Loading photometric data')
|
||||
photo_data = tf.read_gzip(
|
||||
'photo.dat',
|
||||
['HIP', 'Vmag', 'Jmag', 'Hmag', 'Kmag', 'e_Jmag', 'e_Hmag', 'e_Kmag',
|
||||
'B-V', 'V-I', 'e_B-V', 'e_V-I'])
|
||||
photo_data['HIP'].unit = None # for some reason it is set to parsecs in the ReadMe
|
||||
photo_data.add_index('HIP')
|
||||
hip_data = join(hip_data, photo_data, join_type='left', keys='HIP')
|
||||
|
||||
print(' Loading bibliographic data')
|
||||
biblio_data = tf.read_gzip('biblio.dat', ['HIP', 'HD'])
|
||||
biblio_data.add_index('HIP')
|
||||
return join(hip_data, biblio_data, join_type='left', keys='HIP')
|
||||
|
||||
def load_tyc2specnew() -> Table:
|
||||
"""Load revised spectral types."""
|
||||
print("Loading revised TYC2 spectral types")
|
||||
with open_cds_tarfile(os.path.join('vizier', 'tyc2specnew.tar.gz')) as tf:
|
||||
data = tf.read('table2.dat', ['HIP', 'SpType1'])
|
||||
return data[data['SpType1'] != '']
|
||||
|
||||
def load_sao() -> Table:
|
||||
"""Load the SAO-HIP cross match."""
|
||||
print('Loading SAO-HIP cross match')
|
||||
data = io_ascii.read(os.path.join('xmatch', 'sao_hip_xmatch.csv'),
|
||||
include_names=['HIP', 'SAO', 'angDist', 'delFlag'],
|
||||
format='csv')
|
||||
|
||||
data = data[data['delFlag'].mask]
|
||||
data.remove_column('delFlag')
|
||||
|
||||
data = unique(data.group_by(['HIP', 'angDist']), keys=['HIP'])
|
||||
data.remove_column('angDist')
|
||||
|
||||
data.add_index('HIP')
|
||||
return data
|
||||
|
||||
def compute_distances(hip_data: Table, length_kpc: float=1.35) -> None:
|
||||
"""Compute the distance using an exponentially-decreasing prior.
|
||||
|
||||
The method is described in:
|
||||
|
||||
Bailer-Jones (2015) "Estimating Distances from Parallaxes"
|
||||
https://ui.adsabs.harvard.edu/abs/2015PASP..127..994B/abstract
|
||||
|
||||
Using a uniform length scale of 1.35 kpc as suggested for the TGAS
|
||||
catalogue of stars in Gaia DR1.
|
||||
|
||||
Astraatmadja & Bailer-Jones "Estimating distances from parallaxes.
|
||||
III. Distances of two million stars in the Gaia DR1 catalogue"
|
||||
https://ui.adsabs.harvard.edu/abs/2016ApJ...833..119A/abstract
|
||||
"""
|
||||
|
||||
print('Computing distances')
|
||||
|
||||
eplx2 = hip_data['e_Plx'] ** 2
|
||||
|
||||
r3coeff = np.full_like(hip_data['Plx'], 1/length_kpc)
|
||||
r2coeff = np.full_like(hip_data['Plx'], -2)
|
||||
|
||||
roots = np.apply_along_axis(np.roots,
|
||||
0,
|
||||
[r3coeff, r2coeff, hip_data['Plx'] / eplx2, -1 / eplx2])
|
||||
roots[np.logical_or(np.real(roots) < 0.0, abs(np.imag(roots)) > 1.0e-6)] = np.nan
|
||||
parallax_distance = np.nanmin(np.real(roots), 0) * 1000
|
||||
|
||||
# prefer cluster distances (e_Dist NULL), otherwise use computed distance
|
||||
is_cluster_distance = np.logical_and(np.logical_not(hip_data['Dist'].mask),
|
||||
hip_data['e_Dist'].mask)
|
||||
|
||||
hip_data['r_est'] = np.where(is_cluster_distance, hip_data['Dist'], parallax_distance)
|
||||
hip_data['r_est'].unit = u.pc
|
||||
|
||||
HIP_TIME = Time('J1991.25')
|
||||
GAIA_TIME = Time('J2015.5')
|
||||
|
||||
def update_coordinates(hip_data: Table) -> None:
|
||||
"""Update the coordinates from J1991.25 to J2015.5 to match Gaia."""
|
||||
print('Updating coordinates to J2015.5')
|
||||
coords = SkyCoord(frame=ICRS,
|
||||
ra=hip_data['RAdeg'],
|
||||
dec=hip_data['DEdeg'],
|
||||
pm_ra_cosdec=hip_data['pmRA'],
|
||||
pm_dec=hip_data['pmDE'],
|
||||
distance=hip_data['r_est'],
|
||||
radial_velocity=hip_data['RV'].filled(0),
|
||||
obstime=HIP_TIME).apply_space_motion(GAIA_TIME)
|
||||
|
||||
hip_data['ra'] = coords.ra / u.deg
|
||||
hip_data['ra'].unit = u.deg
|
||||
hip_data['dec'] = coords.dec / u.deg
|
||||
hip_data['dec'].unit = u.deg
|
||||
|
||||
def process_xhip() -> Table:
|
||||
"""Processes the XHIP data."""
|
||||
xhip = load_xhip()
|
||||
sptypes = load_tyc2specnew()
|
||||
xhip = join(xhip, sptypes, keys=['HIP'], join_type='left', metadata_conflicts='silent')
|
||||
xhip['SpType'] = xhip['SpType1'].filled(xhip['SpType'])
|
||||
xhip.remove_column('SpType1')
|
||||
|
||||
compute_distances(xhip)
|
||||
update_coordinates(xhip)
|
||||
xhip.remove_columns(['RAdeg', 'DEdeg', 'pmRA', 'pmDE', 'RV', 'Dist', 'e_Dist'])
|
||||
return xhip
|
||||
|
||||
def process_hip() -> Table:
|
||||
"""Process the Gaia and HIP data."""
|
||||
data = join(load_gaia_hip(),
|
||||
process_xhip(),
|
||||
keys=['HIP'],
|
||||
join_type='outer',
|
||||
table_names=['gaia', 'xhip'])
|
||||
|
||||
data = join(data, load_sao(), keys=['HIP'], join_type='left')
|
||||
|
||||
data['r_gaia_score'] = np.where(data['r_est_gaia'].mask,
|
||||
-20000.0,
|
||||
np.where(data['parallax'] <= 0,
|
||||
-10000.0,
|
||||
data['parallax'] / data['parallax_error']))
|
||||
|
||||
data['r_xhip_score'] = np.where(data['Plx'] <= 0,
|
||||
-10000.0,
|
||||
data['Plx'] / data['e_Plx'])
|
||||
|
||||
data['dist_use'] = np.where(data['r_gaia_score'] >= data['r_xhip_score'],
|
||||
data['r_est_gaia'],
|
||||
data['r_est_xhip'])
|
||||
data['dist_use'].unit = u.pc
|
||||
|
||||
data['ra'] = data['ra_gaia'].filled(data['ra_xhip'])
|
||||
data['dec'] = data['dec_gaia'].filled(data['dec_xhip'])
|
||||
|
||||
data.remove_columns(['ra_gaia', 'dec_gaia', 'r_est_gaia', 'ra_xhip', 'dec_xhip', 'r_est_xhip',
|
||||
'parallax', 'parallax_error', 'Plx', 'e_Plx',
|
||||
'r_gaia_score', 'r_xhip_score'])
|
||||
|
||||
return data
|
|
@ -1,297 +0,0 @@
|
|||
# gaia-stardb: Processing Gaia DR2 for celestia.Sci/Celestia
|
||||
# Copyright (C) 2019–2020 Andrew Tribick
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
"""Routines for parsing the TYC2 data."""
|
||||
|
||||
import gzip
|
||||
import os
|
||||
import tarfile
|
||||
|
||||
from typing import Dict, IO, Tuple
|
||||
|
||||
import numpy as np
|
||||
import astropy.io.ascii as io_ascii
|
||||
import astropy.units as u
|
||||
|
||||
from astropy.table import MaskedColumn, Table, join, unique, vstack
|
||||
|
||||
from parse_utils import TarCds, WorkaroundCDSReader, open_cds_tarfile, read_gaia
|
||||
|
||||
def make_tyc(tyc1: int, tyc2: int, tyc3: int) -> int:
|
||||
"""Build a synthetic HIP identifier from TYC parts."""
|
||||
return tyc1 + tyc2*10000 + tyc3*1000000000
|
||||
|
||||
TYC_HD_ERRATA = {
|
||||
"add": [
|
||||
# B. Skiff, 30-Jan-2007
|
||||
(make_tyc(8599, 1797, 1), 298954),
|
||||
# B. Skiff, 12-Jul-2007
|
||||
(make_tyc(6886, 1389, 1), 177868),
|
||||
# LMC inner region
|
||||
(make_tyc(9161, 685, 1), 269051),
|
||||
(make_tyc(9165, 548, 1), 269052),
|
||||
(make_tyc(9169, 1563, 1), 269207),
|
||||
(make_tyc(9166, 2, 1), 269367),
|
||||
(make_tyc(9166, 540, 1), 269382),
|
||||
(make_tyc(8891, 278, 1), 269537),
|
||||
(make_tyc(9162, 657, 1), 269599),
|
||||
(make_tyc(9167, 730, 1), 269858),
|
||||
(make_tyc(9163, 960, 2), 269928),
|
||||
(make_tyc(9163, 751, 1), 270005),
|
||||
(make_tyc(8904, 686, 1), 270078),
|
||||
(make_tyc(9163, 887, 1), 270128),
|
||||
(make_tyc(8904, 766, 1), 270342),
|
||||
(make_tyc(9168, 1217, 1), 270435),
|
||||
(make_tyc(8904, 911, 1), 270467),
|
||||
(make_tyc(8904, 5, 1), 270485),
|
||||
# LMC outer region
|
||||
(make_tyc(9157, 1, 1), 270502),
|
||||
(make_tyc(9160, 1142, 1), 270526),
|
||||
(make_tyc(8888, 928, 1), 270765),
|
||||
(make_tyc(8888, 910, 1), 270794),
|
||||
(make_tyc(9172, 791, 1), 272092),
|
||||
# VizieR annotations
|
||||
(make_tyc(7389, 1138, 1), 320669),
|
||||
# extras
|
||||
(make_tyc(1209, 1833, 1), 11502),
|
||||
(make_tyc(1209, 1835, 1), 11503),
|
||||
],
|
||||
"delete": [
|
||||
# B. Skiff (13-Nov-2007)
|
||||
32228,
|
||||
# LMC inner region
|
||||
269686,
|
||||
269784,
|
||||
# LMC outer region
|
||||
270653,
|
||||
271058,
|
||||
271224,
|
||||
271264,
|
||||
271389,
|
||||
271600,
|
||||
271695,
|
||||
271727,
|
||||
271764,
|
||||
271802,
|
||||
271875,
|
||||
# VizieR annotations
|
||||
181060,
|
||||
]
|
||||
}
|
||||
|
||||
def parse_tyc_string(data: Table, src_column: str, dest_column: str='TYC') -> None:
|
||||
"""Parse a TYC string into a synthetic HIP identifier."""
|
||||
tycs = np.array(np.char.split(data[src_column], '-').tolist()).astype(np.int64)
|
||||
data[dest_column] = make_tyc(tycs[:, 0], tycs[:, 1], tycs[:, 2])
|
||||
data.remove_column(src_column)
|
||||
|
||||
def parse_tyc_cols(data: Table,
|
||||
src_columns: Tuple[str, str, str]=('TYC1', 'TYC2', 'TYC3'),
|
||||
dest_column: str='TYC') -> None:
|
||||
"""Convert TYC identifier components into a synthetic HIP identifier."""
|
||||
data[dest_column] = make_tyc(data[src_columns[0]],
|
||||
data[src_columns[1]],
|
||||
data[src_columns[2]])
|
||||
data.remove_columns(src_columns)
|
||||
|
||||
def load_gaia_tyc() -> Table:
|
||||
"""Load the Gaia DR2 TYC2 sources."""
|
||||
print('Loading Gaia DR2 sources for TYC2')
|
||||
|
||||
file_names = ['gaiadr2_tyc-result.csv', 'gaiadr2_tyc-result-extra.csv']
|
||||
gaia = read_gaia([os.path.join('gaia', f) for f in file_names], 'tyc2_id')
|
||||
|
||||
parse_tyc_string(gaia, 'tyc2_id')
|
||||
gaia.add_index('TYC')
|
||||
|
||||
return unique(gaia.group_by('TYC'), keys=['source_id'])
|
||||
|
||||
def load_tyc_spec() -> Table:
|
||||
"""Load the TYC2 spectral type catalogue."""
|
||||
print('Loading TYC2 spectral types')
|
||||
with open_cds_tarfile(os.path.join('vizier', 'tyc2spec.tar.gz')) as tf:
|
||||
data = tf.read_gzip('catalog.dat', ['TYC1', 'TYC2', 'TYC3', 'SpType'])
|
||||
|
||||
parse_tyc_cols(data)
|
||||
data.add_index('TYC')
|
||||
return data
|
||||
|
||||
def _load_ascc_section(tf: TarCds, table: str) -> Table:
|
||||
print(f' Loading {table}')
|
||||
section = tf.read_gzip(table,
|
||||
['Bmag', 'Vmag', 'e_Bmag', 'e_Vmag', 'd3', 'TYC1', 'TYC2', 'TYC3',
|
||||
'Jmag', 'e_Jmag', 'Hmag', 'e_Hmag', 'Kmag', 'e_Kmag', 'SpType'])
|
||||
|
||||
section = section[section['TYC1'] != 0]
|
||||
parse_tyc_cols(section)
|
||||
|
||||
convert_cols = ['Bmag', 'Vmag', 'e_Bmag', 'e_Vmag', 'Jmag', 'e_Jmag', 'Hmag', 'e_Hmag',
|
||||
'Kmag', 'e_Kmag']
|
||||
for col in convert_cols:
|
||||
section[col] = section[col].astype(np.float64)
|
||||
section[col].convert_unit_to(u.mag)
|
||||
section[col].format = '.3f'
|
||||
|
||||
return section
|
||||
|
||||
def load_ascc() -> Table:
|
||||
"""Load ASCC from VizieR archive."""
|
||||
|
||||
print('Loading ASCC')
|
||||
with open_cds_tarfile(os.path.join('vizier', 'ascc.tar.gz')) as tf:
|
||||
data = None
|
||||
for data_file in tf.tf:
|
||||
sections = os.path.split(data_file.name)
|
||||
if (len(sections) != 2 or sections[0] != '.' or not sections[1].startswith('cc')):
|
||||
continue
|
||||
section_data = _load_ascc_section(tf, os.path.splitext(sections[1])[0])
|
||||
if data is None:
|
||||
data = section_data
|
||||
else:
|
||||
data = vstack([data, section_data], join_type='exact')
|
||||
|
||||
data = unique(data.group_by(['TYC', 'd3']), keys=['TYC'])
|
||||
data.rename_column('d3', 'Comp')
|
||||
data.add_index('TYC')
|
||||
return data
|
||||
|
||||
def load_tyc_hd() -> Table:
|
||||
"""Load the Tycho-HD cross index."""
|
||||
print('Loading TYC-HD cross index')
|
||||
with open_cds_tarfile(os.path.join('vizier', 'tyc2hd.tar.gz')) as tf:
|
||||
data = tf.read_gzip('tyc2_hd.dat', ['TYC1', 'TYC2', 'TYC3', 'HD'])
|
||||
|
||||
parse_tyc_cols(data)
|
||||
|
||||
err_del = np.array(TYC_HD_ERRATA['delete'] + [a[1] for a in TYC_HD_ERRATA['add']])
|
||||
data = data[np.logical_not(np.isin(data['HD'], err_del))]
|
||||
|
||||
err_add = Table(np.array(TYC_HD_ERRATA['add']),
|
||||
names=['TYC', 'HD'],
|
||||
dtype=[np.int64, np.int64])
|
||||
|
||||
data = vstack([data, err_add], join_type='exact')
|
||||
|
||||
data = unique(data.group_by('HD'), keys='TYC')
|
||||
data = unique(data.group_by('TYC'), keys='HD')
|
||||
|
||||
return data
|
||||
|
||||
class TYCTeffReader(WorkaroundCDSReader):
|
||||
"""Custom CDS loader for the TYC Teff table to reduce memory usage."""
|
||||
|
||||
def __init__(self, readme: IO):
|
||||
super().__init__('tycall.dat', ['Tycho', 'Teff'], [], readme)
|
||||
|
||||
def create_table(self) -> Table:
|
||||
"""Creates the table."""
|
||||
return Table(
|
||||
[
|
||||
np.empty(self.record_count, np.int64),
|
||||
np.empty(self.record_count, np.float64)
|
||||
],
|
||||
names=['TYC', 'teff_val'])
|
||||
|
||||
def process_line(self, table: Table, record: int, fields: Dict[str, str]) -> bool:
|
||||
"""Processes fields from a line of the input file."""
|
||||
try:
|
||||
tycsplit = fields['Tycho'].split('-')
|
||||
tyc = int(tycsplit[0]) + int(tycsplit[1])*10000 + int(tycsplit[2])*1000000000
|
||||
teff = float(fields['Teff'])
|
||||
except ValueError:
|
||||
tyc = 0
|
||||
teff = 99999
|
||||
|
||||
if teff != 99999:
|
||||
table['TYC'][record] = tyc
|
||||
table['teff_val'][record] = teff
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def load_tyc_teff() -> Table:
|
||||
"""Load the Tycho-2 effective temperatures."""
|
||||
print('Loading TYC2 effective temperatures')
|
||||
with tarfile.open(os.path.join('vizier', 'tyc2teff.tar.gz'), 'r:gz') as tf:
|
||||
with tf.extractfile('./ReadMe') as readme:
|
||||
reader = TYCTeffReader(readme)
|
||||
|
||||
with tf.extractfile('./tycall.dat.gz') as gzf, gzip.open(gzf, 'rt', encoding='ascii') as f:
|
||||
data = reader.read(f)
|
||||
|
||||
data['teff_val'].unit = u.K
|
||||
data.add_index('TYC')
|
||||
return unique(data, keys=['TYC'])
|
||||
|
||||
def load_sao() -> Table:
|
||||
"""Load the SAO-TYC2 cross match."""
|
||||
print('Loading SAO-TYC2 cross match')
|
||||
xmatch_files = ['sao_tyc2_xmatch.csv',
|
||||
'sao_tyc2_suppl1_xmatch.csv',
|
||||
'sao_tyc2_suppl2_xmatch.csv']
|
||||
data = vstack(
|
||||
[io_ascii.read(os.path.join('xmatch', f),
|
||||
include_names=['SAO', 'TYC1', 'TYC2', 'TYC3', 'angDist', 'delFlag'],
|
||||
format='csv',
|
||||
converters={'delFlag': [io_ascii.convert_numpy(np.str)]})
|
||||
for f in xmatch_files],
|
||||
join_type='exact')
|
||||
|
||||
data = data[data['delFlag'].mask]
|
||||
data.remove_column('delFlag')
|
||||
|
||||
parse_tyc_cols(data)
|
||||
|
||||
data = unique(data.group_by(['TYC', 'angDist']), keys=['TYC'])
|
||||
data.remove_column('angDist')
|
||||
|
||||
data.add_index('TYC')
|
||||
return data
|
||||
|
||||
def merge_tables() -> Table:
|
||||
"""Merges the tables."""
|
||||
data = join(load_gaia_tyc(), load_tyc_spec(), keys=['TYC'], join_type='left')
|
||||
data = join(data, load_ascc(),
|
||||
keys=['TYC'],
|
||||
join_type='left',
|
||||
table_names=('gaia', 'ascc'),
|
||||
metadata_conflicts='silent')
|
||||
data['SpType'] = MaskedColumn(data['SpType_gaia'].filled(data['SpType_ascc'].filled('')))
|
||||
data['SpType'].mask = data['SpType'] == ''
|
||||
data.remove_columns(['SpType_gaia', 'SpType_ascc'])
|
||||
|
||||
data = join(data, load_tyc_hd(), keys=['TYC'], join_type='left', metadata_conflicts='silent')
|
||||
|
||||
data = join(data,
|
||||
load_tyc_teff(),
|
||||
keys=['TYC'],
|
||||
join_type='left',
|
||||
table_names=('gaia', 'tycteff'))
|
||||
|
||||
data['teff_val'] = MaskedColumn(
|
||||
data['teff_val_gaia'].filled(data['teff_val_tycteff'].filled(np.nan)))
|
||||
data['teff_val'].mask = np.isnan(data['teff_val'])
|
||||
data.remove_columns(['teff_val_tycteff', 'teff_val_gaia'])
|
||||
|
||||
data = join(data, load_sao(), keys=['TYC'], join_type='left')
|
||||
return data
|
||||
|
||||
def process_tyc() -> Table:
    """Processes the TYC data."""
    # Build the merged catalogue, then expose the estimated distance under
    # the column name the downstream star-database writer expects.
    merged = merge_tables()
    merged.rename_column('r_est', 'dist_use')
    return merged
|
|
@ -1,178 +0,0 @@
|
|||
# gaia-stardb: Processing Gaia DR2 for celestia.Sci/Celestia
|
||||
# Copyright (C) 2019–2020 Andrew Tribick
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
"""Common utilities for parsers."""
|
||||
|
||||
import gzip
|
||||
import re
|
||||
import tarfile
|
||||
import warnings
|
||||
|
||||
from contextlib import contextmanager
|
||||
from tarfile import TarFile
|
||||
from typing import Dict, Generator, IO, List, TextIO, Tuple, Union
|
||||
|
||||
import astropy.io.ascii as io_ascii
|
||||
import astropy.units as u
|
||||
import numpy as np
|
||||
|
||||
from astropy.table import Table, vstack
|
||||
from astropy.units import UnitsWarning
|
||||
|
||||
def read_gaia(files: Union[str, List[str]], id_name: str, *,
              extra_fields: Union[List[str], None] = None) -> Table:
    """Parse the CSV files produced by querying the Gaia TAP endpoint.

    Arguments:
        files: a single CSV path, or a list of CSV paths stacked together.
        id_name: name of the cross-match identifier column to keep.
        extra_fields: optional additional column names to include.

    Returns:
        The (possibly vertically stacked) table with standard units applied.
    """
    # NOTE: the default annotation previously read List[str]=None, which does
    # not admit the None default; Union[List[str], None] is the correct type.
    fields = ['source_id', id_name, 'ra', 'dec', 'phot_g_mean_mag', 'bp_rp', 'teff_val', 'r_est']
    if extra_fields is not None:
        fields += extra_fields

    if isinstance(files, str):
        gaia = io_ascii.read(files, include_names=fields, format='csv')
    else:
        gaia = vstack([io_ascii.read(f, include_names=fields, format='csv') for f in files],
                      join_type='exact')

    # The TAP CSV output carries no unit metadata, so attach it here.
    gaia['ra'].unit = u.deg
    gaia['dec'].unit = u.deg
    gaia['phot_g_mean_mag'].unit = u.mag
    gaia['bp_rp'].unit = u.mag
    gaia['teff_val'].unit = u.K
    gaia['r_est'].unit = u.pc

    return gaia
|
||||
|
||||
class TarCds:
    """Routines for accessing CDS files contained with a tar archive."""

    def __init__(self, tf: TarFile):
        # Already-open TarFile; ownership stays with the caller, which is
        # responsible for closing it (see open_cds_tarfile).
        self.tf = tf

    def read(self, table: str, names: List[str], *, readme_name=None, **kwargs) -> Table:
        """Reads a table from the CDS archive.

        Arguments:
            table: member name of the data file inside the archive.
            names: column names to include.
            readme_name: table name looked up in the ReadMe; defaults to table.
            **kwargs: extra arguments forwarded to the CDS reader.
        """
        if readme_name is None:
            readme_name = table
        # The reader is configured from the archive's ReadMe before the data
        # member itself is opened.
        with self.tf.extractfile('./ReadMe') as readme:
            reader = self._create_reader(readme, readme_name, names, **kwargs)
        with self.tf.extractfile(f'./{table}') as f:
            return self._read(reader, f)

    def read_gzip(self, table: str, names: List[str], *, readme_name=None, **kwargs) -> Table:
        """Reads a gzipped table from the CDS archive.

        Same as read(), but the data member is stored as '{table}.gz' and is
        decompressed on the fly.
        """
        if readme_name is None:
            readme_name = table
        with self.tf.extractfile('./ReadMe') as readme:
            reader = self._create_reader(readme, readme_name, names, **kwargs)
        with self.tf.extractfile(f'./{table}.gz') as gzf, gzip.open(gzf, 'rb') as f:
            return self._read(reader, f)

    @classmethod
    def _create_reader(cls, readme: IO, table: str, names: List[str], **kwargs) -> io_ascii.Cds:
        # Build a CDS reader whose column layout comes from the ReadMe; the
        # table_name tells the reader which byte-by-byte section applies.
        reader = io_ascii.get_reader(io_ascii.Cds,
                                     readme=readme,
                                     include_names=names,
                                     **kwargs)
        reader.data.table_name = table
        return reader

    @classmethod
    def _read(cls, reader: io_ascii.Cds, file: IO) -> Table:
        # Suppress a warning generated because the reader does not handle logarithmic units
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', UnitsWarning)
            return reader.read(file)
|
||||
|
||||
@contextmanager
def open_cds_tarfile(file: str) -> Generator[TarCds, None, None]:
    """Opens a CDS tarfile."""
    # Equivalent to a with-statement: the archive is closed when the
    # caller's context exits, whether normally or via an exception.
    archive = tarfile.open(file, 'r:gz')
    try:
        yield TarCds(archive)
    finally:
        archive.close()
|
||||
|
||||
class WorkaroundCDSReader:
    """Custom CDS file reader to work around errors in input data formats."""

    def __init__(self, table: str, labels: List[str], dtypes: List[np.dtype], readme: IO):
        # labels and dtypes are parallel lists: one dtype per requested column.
        self.labels = labels
        self.dtypes = dtypes
        # Record count and per-label byte ranges parsed out of the CDS ReadMe.
        self.record_count, self.ranges = self._get_fields(table, labels, readme)

    def read(self, file: TextIO) -> Table:
        """Reads the input file according to the field specifications."""
        table = self.create_table()
        record = 0
        for line in file:
            # Slice each field out of the fixed-width line and strip padding.
            fields = { l: line[self.ranges[l][0]:self.ranges[l][1]].strip() for l in self.labels }
            if self.process_line(table, record, fields):
                record += 1
        # Trim the preallocated table down to the rows actually parsed.
        return table[0:record]

    def create_table(self) -> Table:
        """Creates the table."""
        # Preallocated, uninitialised storage for record_count rows; read()
        # discards the unused tail.
        return Table([np.empty(self.record_count, d) for d in self.dtypes], names=self.labels)

    def process_line(self, table: Table, record: int, fields: Dict[str, str]) -> bool:
        """Processes fields from a line of the input file.

        Returns True when every requested field parsed, False when any field
        was empty or unparseable (the row is then skipped by read()).
        """
        for label, dtype in zip(self.labels, self.dtypes):
            try:
                # np.fromstring in text mode returns an empty array for an
                # empty/invalid field, making the [0] raise IndexError.
                table[label][record] = np.fromstring(fields[label], dtype=dtype, sep=' ')[0]
            except IndexError:
                return False
        return True

    @classmethod
    def _get_fields(cls, table: str, labels: List[str], readme: IO) \
            -> Tuple[int, Dict[str, Tuple[int, int]]]:
        # Scans the CDS ReadMe for (a) the record count of `table` from the
        # file-summary line and (b) the byte ranges of the requested labels
        # from the table's byte-by-byte description.
        ranges = {}

        # Summary line: "<name> <length> <records> ..."; the second number is
        # captured as the record count.
        re_file = re.compile(re.escape(table) + r'\ +[0-9]+\ +(?P<length>[0-9]+)')
        re_table = re.compile(r'Byte-by-byte Description of file: (?P<name>\S+)$')
        re_field = re.compile(r'''\ *(?P<start>[0-9]+)\ *-\ *(?P<end>[0-9]+) # range
                                  \ +\S+ # format
                                  \ +\S+ # units
                                  \ +(?P<label>\S+) # label''', re.X)
        record_count = None
        current_table = None
        for line in readme:
            try:
                # The ReadMe may be opened in binary mode (e.g. from a tarfile).
                line = line.decode('ascii')
            except AttributeError:
                pass
            match = re_file.match(line)
            if match:
                record_count = int(match.group('length'))
                continue
            match = re_table.match(line)
            if match:
                current_table = match.group('name')
                continue
            # Only parse field definitions inside our table's description.
            if current_table != table:
                continue
            match = re_field.match(line)
            if not match:
                continue

            label = match.group('label')
            if label in labels:
                # Convert 1-based inclusive CDS columns into Python slice bounds.
                ranges[label] = int(match.group('start'))-1, int(match.group('end'))

            # All requested labels found: no need to scan further.
            if len(ranges) == len(labels):
                break

        if record_count is None:
            raise RuntimeError('Could not get record count')
        if len(ranges) != len(labels):
            missing = ", ".join(l for l in labels if l not in ranges)
            raise RuntimeError(f'Could not find {missing} fields')

        return record_count, ranges
|
|
@ -1,20 +0,0 @@
|
|||
Arpeggio==1.9.2
|
||||
astropy==4.0.1.post1
|
||||
astroquery==0.4.1
|
||||
beautifulsoup4==4.9.1
|
||||
certifi==2020.6.20
|
||||
cffi==1.14.2
|
||||
chardet==3.0.4
|
||||
cryptography==3.3.2
|
||||
html5lib==1.1
|
||||
idna==2.10
|
||||
jeepney==0.4.3
|
||||
keyring==21.3.0
|
||||
numpy==1.19.1
|
||||
pycparser==2.20
|
||||
requests==2.25.1
|
||||
SecretStorage==3.1.2
|
||||
six==1.15.0
|
||||
soupsieve==2.0.1
|
||||
urllib3==1.26.5
|
||||
webencodings==0.5.1
|
|
@ -1,412 +0,0 @@
|
|||
# gaia-stardb: Processing Gaia DR2 for celestia.Sci/Celestia
|
||||
# Copyright (C) 2019–2020 Andrew Tribick
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
"""Routines for parsing spectral types."""
|
||||
|
||||
import re
|
||||
|
||||
from enum import IntEnum
|
||||
|
||||
from arpeggio import (NoMatch, OneOrMore, Optional, ParserPython, PTNodeVisitor, RegExMatch,
|
||||
ZeroOrMore, visit_parse_tree)
|
||||
|
||||
class CelMkClass(IntEnum):
    """Celestia MK and WD classes.

    The value is the spectral class packed into the high byte of the
    Celestia spectral-type integer; it is summed with the subclass
    (second nibble) and luminosity class (low nibble) elsewhere.
    """
    O = 0x0000
    B = 0x0100
    A = 0x0200
    F = 0x0300
    G = 0x0400
    K = 0x0500
    M = 0x0600
    R = 0x0700
    S = 0x0800
    N = 0x0900
    WC = 0x0a00
    WN = 0x0b00
    Unknown = 0x0c00
    L = 0x0d00
    T = 0x0e00
    C = 0x0f00
    # White dwarf classes
    DA = 0x1000
    DB = 0x1100
    DC = 0x1200
    DO = 0x1300
    DQ = 0x1400
    DZ = 0x1500
    D = 0x1600
    DX = 0x1700
|
||||
|
||||
# Subclass value used when the numeric subclass is unknown; known subclasses
# 0-9 are packed as subclass * 0x10 (see SpecVisitor.visit_mktype).
CEL_UNKNOWN_SUBCLASS = 0x00a0


class CelLumClass(IntEnum):
    """Celestia luminosity classes (low nibble of the packed spectral type)."""
    Ia0 = 0x0000
    Ia = 0x0001
    Ib = 0x0002
    II = 0x0003
    III = 0x0004
    IV = 0x0005
    V = 0x0006
    VI = 0x0007
    Unknown = 0x0008


# Packed spectral type for a star about which nothing is known.
CEL_UNKNOWN_STAR = CelMkClass.Unknown + CEL_UNKNOWN_SUBCLASS + CelLumClass.Unknown
||||
|
||||
# pylint: disable=missing-function-docstring,multiple-statements

# Format specification
#
# Arpeggio PEG rules. The parser is built with skipws=False, so whitespace
# must be matched explicitly via spacer().

def spacer(): return ZeroOrMore(' ')
def rangesym(): return spacer, ['/', '-'], spacer
def uncertain(): return Optional([':', '?'])
# Subclass number, possibly fractional (e.g. "3.5").
def numeric(): return RegExMatch(r'[0-9]+(\.[0-9]*)?')
# Roman-numeral luminosity class with optional a/b suffixes (e.g. "IIIab").
def roman(): return RegExMatch(r'(I[VX]|VI{0,3}|I{1,3})([Aa]?[Bb]?)')
# Old-style luminosity prefixes (subdwarf, dwarf, giant, supergiant).
def prefix(): return ['esd', 'sd', 'd', 'g', 'c']
|
||||
|
||||
# MS stars, spectra written as, e.g. M3S

def msnorange(): return 'M', spacer, Optional(numeric, spacer), 'S'
def msrange():
    return [
        ('M', spacer, numeric, rangesym, numeric, spacer, 'S'),
        ('M', spacer, numeric, ['-', '+'], spacer, 'S')]

# normal MK spectra, e.g. K4 plus Wolf-Rayet stars

def mkclass(): return ['WN', 'WC', 'WO', 'WR', 'O', 'B', 'A', 'F', 'G', 'K', 'M', 'L', 'T', 'Y']
def mknorange():
    return [
        (mkclass, spacer, '(', numeric, ')'),
        (mkclass, Optional(spacer, numeric))]
def mkrange():
    return [
        (mkclass, spacer, numeric, rangesym, numeric),
        (mkclass, spacer, '(', numeric, rangesym, numeric, ')'),
        (mkclass, spacer, numeric, ['-', '+'])]

# Combined MS/MK alternatives; the MS rules come first so e.g. "M3S" is not
# consumed as a bare M type.
def mkmsnorange(): return [msnorange, mknorange]
def mkmsrange(): return [msrange, mkrange]

def mktype():
    return [
        (mkmsnorange, rangesym, mkmsnorange),
        mkmsrange,
        mkmsnorange]
|
||||
|
||||
# Luminosity classes, possibly written as a range (e.g. "III-IV") or in
# parentheses, plus the full "star without old-style prefix" rule.

def lumclass(): return ['0', 'Vz', roman]
def lumrange(): return ['Ia0', (lumclass, Optional(rangesym, lumclass))]
def lumtype():
    return [
        (lumrange, Optional(uncertain)),
        ('(', lumrange, ')')]

def noprefixstar():
    return [
        (mktype, uncertain, spacer, Optional(lumtype)),
        ('(', mktype, ')', spacer, Optional(lumtype))]
def normalstar(): return (Optional(prefix, uncertain), noprefixstar)
|
||||
|
||||
# metallic stars (kA5hF0mA4 etc.)
# Each section is a prefix letter (the line/feature it was classified from)
# followed by an ordinary spectral type.

def metalprefix(): return ['g', 'h', 'k', 'm', 'He']
def metalsection(): return metalprefix, noprefixstar
def metalstar(): return Optional(noprefixstar), OneOrMore(metalsection)
|
||||
|
||||
# S stars and carbon stars - these can have an abundance index in addition to
# the subclass, furthermore carbon stars can be written with the subtype
# written as a suffix, e.g. C4,3J instead of C-J4,3

def cclass(): return ['C-R', 'C-N', 'C-J', 'C-Hd', 'C-H'], uncertain
def scclass(): return ['SC', 'S', cclass, 'C', 'R', 'N']
def scsuffix(): return ['J', 'H', 'Hd']
def scrange():
    return [
        (numeric, '-', Optional(numeric)),
        (numeric, '+'),
        (numeric, Optional(OneOrMore(' '), '-', OneOrMore(' '), numeric))]

# Subclass index optionally followed by an abundance index ("4,3" / "4/3").
def scindices(): return spacer, scrange, Optional(['/', ','], scrange)

def scsuffixed(): return 'C', Optional(scindices), spacer, scsuffix
def scnosuffix(): return scclass, Optional(scindices)
def sctype(): return [scsuffixed, scnosuffix]
def scstar(): return Optional(prefix), sctype, spacer, Optional(lumtype)
|
||||
|
||||
# white dwarfs

def wdclass(): return ['DA', 'DB', 'DC', 'DO', 'DZ', 'DQ', 'DX', 'D']
def wdstar():
    return [
        (wdclass, numeric, Optional(rangesym, numeric)),
        (wdclass, Optional(rangesym, wdclass))]

# Top-level rule: any star spectrum, optionally wrapped in (…) or […].
def starspectrum(): return [metalstar, normalstar, scstar, wdstar]
def spectrum():
    return [
        starspectrum,
        ('(', starspectrum, ')'),
        ('[', starspectrum, ']')]
|
||||
|
||||
class SpecVisitor(PTNodeVisitor):
    """Parse tree visitor to compute Celestia spectral type.

    Each visit_* method reduces its grammar node to None (ignored), an
    intermediate tuple, or packed CelMkClass/CelLumClass components; the
    caller (parse_spectrum) sums the final components into one integer.
    """

    # pylint: disable=unused-argument,no-self-use,redefined-outer-name,too-many-public-methods

    def visit_spacer(self, node, children):
        # Whitespace carries no information.
        return None

    def visit_rangesym(self, node, children):
        # Range separators are dropped; only the first endpoint is used.
        return None

    def visit_uncertain(self, node, children):
        # Uncertainty markers (':', '?') are ignored.
        return None

    def visit_numeric(self, node, children):
        # Fractional subclasses are truncated (e.g. '3.5' -> 3).
        return int(float(node.value))

    def visit_prefix(self, node, children):
        # Map old-style prefixes to luminosity classes.
        if str(node) == 'esd' or str(node) == 'sd':
            lclass = CelLumClass.VI
        elif str(node) == 'd':
            lclass = CelLumClass.V
        elif str(node) == 'g':
            lclass = CelLumClass.III
        elif str(node) == 'c':
            lclass = CelLumClass.Ib
        else:
            raise ValueError
        return lclass

    def visit_msnorange(self, node, children):
        # MS star: treated as class M with the given (optional) subclass.
        if len(children.numeric) > 0:
            return 'M', children.numeric[0]
        return 'M', None

    def visit_msrange(self, node, children):
        # For a subclass range, keep the first endpoint.
        return 'M', children.numeric[0]

    def visit_mknorange(self, node, children):
        if len(children.numeric) > 0:
            return children.mkclass[0], children.numeric[0]
        return children.mkclass[0], None

    def visit_mkrange(self, node, children):
        # For a subclass range, keep the first endpoint.
        return children.mkclass[0], children.numeric[0]

    def visit_mktype(self, node, children):
        # Pack the (class, subclass) pair: subclass goes into the second
        # nibble (0x00-0x90), or CEL_UNKNOWN_SUBCLASS when absent.
        mkclass, subclass = children[0]
        if subclass is None:
            subclass = CEL_UNKNOWN_SUBCLASS
        elif subclass < 0:
            subclass = 0x00
        elif subclass > 9:
            subclass = 0x90
        else:
            subclass *= 0x10

        # Y dwarfs have no Celestia class: fold into the coolest T subclass.
        if mkclass == 'Y':
            return CelMkClass.T, 0x90
        # WR/WO have no dedicated class: fold into WC.
        if mkclass in ('WR', 'WO'):
            return CelMkClass.WC, subclass
        return CelMkClass[mkclass], subclass

    def visit_lumrange(self, node, children):
        # Map the (first element of a) luminosity range onto Celestia's
        # coarser set of classes.
        if (len(children) == 2
                and (children[0] in ('Ia', 'IA'))
                and children[1] == '0'):
            lclass = CelLumClass.Ia0
        elif children[0] in ('Ia0', 'IA0', '0'):
            lclass = CelLumClass.Ia0
        elif children[0].startswith('III'):
            lclass = CelLumClass.III
        elif children[0].startswith('II'):
            lclass = CelLumClass.II
        elif children[0].startswith('IV'):
            lclass = CelLumClass.IV
        elif children[0].startswith('IX'):
            # NOTE(review): 'IX' mapped to VI — presumably treating it as a
            # mis-typed roman numeral; confirm against upstream intent.
            lclass = CelLumClass.VI
        elif children[0] in ('Ia', 'IA'):
            lclass = CelLumClass.Ia
        elif children[0].startswith('I'):
            lclass = CelLumClass.Ib
        elif children[0].startswith('VI'): # VII, VIII as well
            lclass = CelLumClass.VI
        elif children[0].startswith('V'):
            lclass = CelLumClass.V
        else:
            raise ValueError
        return lclass

    def visit_lumtype(self, node, children):
        return children.lumrange[0]

    def visit_noprefixstar(self, node, children):
        # Result triple: (mk class, packed subclass, luminosity class).
        mkclass, mksubclass = children.mktype[0]
        if len(children.lumtype) > 0:
            return mkclass, mksubclass, children.lumtype[0]
        return mkclass, mksubclass, CelLumClass.Unknown

    def visit_normalstar(self, node, children):
        mkclass, mksubclass, lclass = children.noprefixstar[0]

        # An old-style prefix (sd/d/g/c) overrides the luminosity class.
        if len(children.prefix) > 0:
            lclass = children.prefix[0]

        return mkclass, mksubclass, lclass

    def visit_metalsection(self, node, children):
        # (prefix letter, star triple)
        return children[0], children[1]

    def visit_metalstar(self, node, children):
        # Choose which section of a composite type (kA5hF0mA4…) to report:
        # an unprefixed leading section (stored under ' ') wins, then the
        # hydrogen-line ('h') section, then the Ca K ('k') section.
        sections = dict(children.metalsection)
        if len(children.noprefixstar) > 0:
            sections[' '] = children.noprefixstar[0]
            first_section = sections[' ']
        else:
            first_section = children.metalsection[0][1]

        # The luminosity class is taken from the last section when the
        # selected one does not carry its own.
        overall_lclass = children.metalsection[-1][1][2]
        if len(sections) == 1:
            selected = sections[next(iter(sections))]
        elif ' ' in sections:
            selected = sections[' ']
        elif 'h' in sections:
            selected = sections['h']
        elif 'k' in sections:
            selected = sections['k']
        else:
            selected = first_section

        if selected[2] is None:
            return selected[0], selected[1], overall_lclass
        return selected

    def visit_cclass(self, node, children):
        return children[0]

    def visit_scrange(self, node, children):
        # Keep the first endpoint of a subclass/abundance range.
        return int(children[0])

    def visit_scindices(self, node, children):
        # Keep the subclass index; the abundance index is discarded.
        return children.scrange[0]

    def visit_scnosuffix(self, node, children):
        if len(children.scindices) > 0:
            return children.scclass[0], children.scindices[0]
        return children.scclass[0], None

    def visit_scsuffixed(self, node, children):
        # Normalise suffix notation (C4,3J) to the C-J style class name.
        ctype = 'C-' + children.scsuffix[0]
        if len(children.scindices) > 0:
            return ctype, children.scindices[0]
        return ctype, None

    def visit_scstar(self, node, children):
        scclass, scsubclass = children.sctype[0]
        # Prefix beats explicit luminosity type, matching visit_normalstar.
        if len(children.prefix) > 0:
            lclass = children.prefix[0]
        elif len(children.lumtype) > 0:
            lclass = children.lumtype[0]
        else:
            lclass = CelLumClass.Unknown

        # Same subclass packing as visit_mktype.
        if scsubclass is None:
            scsubclass = CEL_UNKNOWN_SUBCLASS
        elif scsubclass < 0:
            scsubclass = 0x00
        elif scsubclass > 9:
            scsubclass = 0x90
        else:
            scsubclass *= 0x10

        # Fold the carbon-star subtypes onto Celestia's classes.
        if scclass in ('C-R', 'R'):
            return CelMkClass.R, scsubclass, lclass
        if scclass in ('C-N', 'N'):
            return CelMkClass.N, scsubclass, lclass
        if scclass == 'SC':
            return CelMkClass.S, scsubclass, lclass
        if scclass.startswith('C'):
            return CelMkClass.C, scsubclass, lclass
        return CelMkClass[scclass], scsubclass, lclass

    def visit_wdstar(self, node, children):
        # Unknown white-dwarf subtypes fall back to the generic D class.
        try:
            wdclass = CelMkClass[children.wdclass[0]]
        except KeyError:
            wdclass = CelMkClass.D

        if len(children.numeric) > 0:
            wdsubclass = children.numeric[0]
            if wdsubclass < 0:
                wdsubclass = 0
            elif wdsubclass > 9:
                wdsubclass = 0x90
            else:
                wdsubclass *= 0x10

        else:
            wdsubclass = CEL_UNKNOWN_SUBCLASS

        # White dwarfs carry no luminosity class component.
        return wdclass, wdsubclass
|
||||
|
||||
# pylint: enable=missing-function-docstring

# Module-level singletons: the PEG parser for the grammar above (explicit
# whitespace handling, hence skipws=False), the shared visitor, and a regex
# locating a '+'-separated additional component (companion star, white dwarf
# or an open-ended '...' continuation) so it can be cut off before parsing.
PARSER = ParserPython(spectrum, skipws=False)
VISITOR = SpecVisitor()
MULTISEPARATOR = re.compile(r'\+\ *(?:\.{2,}|(?:\(?(?:sd|d|g|c|k|h|m|g|He)?[OBAFGKM]|W[DNOCR]|wd))')
|
||||
|
||||
def parse_spectrum(sptype: str) -> int:
    """Parse a spectral type string into a Celestia spectral type.

    Returns the packed integer (MK class + subclass + luminosity class),
    or CEL_UNKNOWN_STAR for empty/unparseable input.
    """

    # resolve ambiguity in grammar: B 0-Ia could be interpreted as (B 0-) Ia or B (0-Ia)
    # resolve in favour of latter
    processed_type = sptype.strip().replace('0-Ia', 'Ia-0')

    if not processed_type:
        return CEL_UNKNOWN_STAR

    # remove outer brackets; the length guard prevents an IndexError on
    # degenerate inputs such as "()" or "[]" that strip down to nothing
    while (len(processed_type) >= 2
           and ((processed_type[0] == '(' and processed_type[-1] == ')')
                or (processed_type[0] == '[' and processed_type[-1] == ']'))):
        processed_type = processed_type[1:-1]

    # remove leading uncertainty indicator
    if processed_type.startswith(':'):
        processed_type = processed_type[1:]

    # the stripping above may have consumed the whole string (e.g. ":" or "()")
    if not processed_type:
        return CEL_UNKNOWN_STAR

    # deal with cases where an O-type spectrum is represented using 0
    if processed_type[0] == '0':
        processed_type = 'O' + processed_type[1:]

    # remove nebulae and novae (might otherwise be parsed as N-type)
    if (processed_type.casefold().startswith("neb".casefold())
            or processed_type.casefold().startswith("nova".casefold())):
        return CEL_UNKNOWN_STAR

    # resolve ambiguity about whether + is an open-ended range or identifies a
    # component: when it introduces another component, drop everything after it
    separator_match = MULTISEPARATOR.search(processed_type)
    if separator_match:
        processed_type = processed_type[:separator_match.span()[0]]

    try:
        parse_tree = PARSER.parse(processed_type)
    except NoMatch:
        return CEL_UNKNOWN_STAR

    # the visitor yields the packed components; their sum is the final type
    return sum(visit_parse_tree(parse_tree, VISITOR))
|
Loading…
Reference in New Issue