Use import aliases consistently for numpy and pandas
larsevj authored and eivindjahren committed Aug 26, 2024
commit 062f770 (1 parent: f3ccfec)
Showing 6 changed files with 68 additions and 71 deletions.
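The change itself is mechanical: every module-level "import numpy" / "import pandas" becomes the conventional aliased form, and each call site is updated to match. A minimal before/after sketch of the pattern (illustrative only, not lifted from the diff):

    # Before: fully qualified module names
    import numpy
    import pandas

    values = numpy.zeros(10, dtype=numpy.float64)
    frame = pandas.DataFrame({"values": values})

    # After: the short aliases used consistently throughout the diff below
    import numpy as np
    import pandas as pd

    values = np.zeros(10, dtype=np.float64)
    frame = pd.DataFrame({"values": values})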
48 changes: 24 additions & 24 deletions python/resdata/grid/rd_grid.py
@@ -11,8 +11,8 @@
import ctypes

import warnings
import numpy
import pandas
import numpy as np
import pandas as pd
import sys
import os.path
import math
@@ -1148,11 +1148,11 @@ def create_kw(self, array, kw_name, pack):
and dims[2] == self.getNZ()
):
dtype = array.dtype
if dtype == numpy.int32:
if dtype == np.int32:
type = ResDataType.RD_INT
elif dtype == numpy.float32:
elif dtype == np.float32:
type = ResDataType.RD_FLOAT
elif dtype == numpy.float64:
elif dtype == np.float64:
type = ResDataType.RD_DOUBLE
else:
sys.exit("Do not know how to create rd_kw from type:%s" % dtype)
@@ -1177,7 +1177,7 @@ def create_kw(self, array, kw_name, pack):
kw[active_index] = array[i, j, k]
active_index += 1
else:
if dtype == numpy.int32:
if dtype == np.int32:
kw[global_index] = int(array[i, j, k])
else:
kw[global_index] = array[i, j, k]
@@ -1221,7 +1221,7 @@ def create_3d(self, rd_kw, default=0):
"""
if len(rd_kw) == self.getNumActive() or len(rd_kw) == self.getGlobalSize():
array = numpy.ones([self.getGlobalSize()], dtype=rd_kw.dtype) * default
array = np.ones([self.getGlobalSize()], dtype=rd_kw.dtype) * default
kwa = rd_kw.array
if len(rd_kw) == self.getGlobalSize():
for i in range(kwa.size):
@@ -1383,14 +1383,14 @@ def export_index(self, active_only=False):
size = self.get_num_active()
else:
size = self.get_global_size()
indx = numpy.zeros(size, dtype=numpy.int32)
data = numpy.zeros([size, 4], dtype=numpy.int32)
indx = np.zeros(size, dtype=np.int32)
data = np.zeros([size, 4], dtype=np.int32)
self._export_index_frame(
indx.ctypes.data_as(ctypes.POINTER(ctypes.c_int32)),
data.ctypes.data_as(ctypes.POINTER(ctypes.c_int32)),
active_only,
)
df = pandas.DataFrame(data=data, index=indx, columns=["i", "j", "k", "active"])
df = pd.DataFrame(data=data, index=indx, columns=["i", "j", "k", "active"])
return df

def export_data(self, index_frame, kw, default=0):
@@ -1404,17 +1404,17 @@ def export_data(self, index_frame, kw, default=0):
If kw is of length num_active, values in the output vector
corresponding to inactive cells are set to default.
"""
if not isinstance(index_frame, pandas.DataFrame):
if not isinstance(index_frame, pd.DataFrame):
raise TypeError("index_frame must be pandas.DataFrame")
if len(kw) == self.get_global_size():
index = index_frame.index.to_numpy(dtype=numpy.int32, copy=True)
index = index_frame.index.to_numpy(dtype=np.int32, copy=True)
elif len(kw) == self.get_num_active():
index = index_frame["active"].to_numpy(dtype=numpy.int32, copy=True)
index = index_frame["active"].to_numpy(dtype=np.int32, copy=True)
else:
raise ValueError("The keyword must have a 3D compatible length")

if kw.type is ResdataTypeEnum.RD_INT_TYPE:
data = numpy.full(len(index), default, dtype=numpy.int32)
data = np.full(len(index), default, dtype=np.int32)
self._export_data_as_int(
len(index),
index.ctypes.data_as(ctypes.POINTER(ctypes.c_int32)),
@@ -1426,7 +1426,7 @@ def export_data(self, index_frame, kw, default=0):
kw.type is ResdataTypeEnum.RD_FLOAT_TYPE
or kw.type is ResdataTypeEnum.RD_DOUBLE_TYPE
):
data = numpy.full(len(index), default, dtype=numpy.float64)
data = np.full(len(index), default, dtype=np.float64)
self._export_data_as_double(
len(index),
index.ctypes.data_as(ctypes.POINTER(ctypes.c_int32)),
@@ -1444,10 +1444,10 @@ def export_volume(self, index_frame):
Index_fram must be a pandas dataframe with the same structure
as obtained from export_index.
"""
if not isinstance(index_frame, pandas.DataFrame):
if not isinstance(index_frame, pd.DataFrame):
raise TypeError("index_frame must be pandas.DataFrame")
index = index_frame.index.to_numpy(dtype=numpy.int32, copy=True)
data = numpy.zeros(len(index), dtype=numpy.float64)
index = index_frame.index.to_numpy(dtype=np.int32, copy=True)
data = np.zeros(len(index), dtype=np.float64)
self._export_volume(
len(index),
index.ctypes.data_as(ctypes.POINTER(ctypes.c_int32)),
@@ -1462,10 +1462,10 @@ def export_position(self, index_frame):
Index_fram must be a pandas dataframe with the same structure
as obtained from export_index.
"""
if not isinstance(index_frame, pandas.DataFrame):
if not isinstance(index_frame, pd.DataFrame):
raise TypeError("index_frame must be pandas.DataFrame")
index = index_frame.index.to_numpy(dtype=numpy.int32, copy=True)
data = numpy.zeros([len(index), 3], dtype=numpy.float64)
index = index_frame.index.to_numpy(dtype=np.int32, copy=True)
data = np.zeros([len(index), 3], dtype=np.float64)
self._export_position(
len(index),
index.ctypes.data_as(ctypes.POINTER(ctypes.c_int32)),
@@ -1508,10 +1508,10 @@ def export_corners(self, index_frame):
increase 'towards the sky'; the safest way is probably to check this
explicitly if it matters for the case at hand.
"""
if not isinstance(index_frame, pandas.DataFrame):
if not isinstance(index_frame, pd.DataFrame):
raise TypeError("index_frame must be pandas.DataFrame")
index = index_frame.index.to_numpy(dtype=numpy.int32, copy=True)
data = numpy.zeros([len(index), 24], dtype=numpy.float64)
index = index_frame.index.to_numpy(dtype=np.int32, copy=True)
data = np.zeros([len(index), 24], dtype=np.float64)
self._export_corners(
len(index),
index.ctypes.data_as(ctypes.POINTER(ctypes.c_int32)),
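The export_* methods above all follow the same pattern: allocate numpy buffers with the new np alias, hand their raw pointers to the C layer through ndarray.ctypes.data_as, then wrap the filled buffers in a pd.DataFrame. A self-contained sketch of that mechanism, with a hypothetical build_index_frame standing in for the real _export_index_frame C call:

    import ctypes

    import numpy as np
    import pandas as pd

    def build_index_frame(size):
        # Hypothetical stand-in for Grid.export_index(): allocate int32 buffers,
        # expose them as C pointers, and wrap the result in a DataFrame.
        indx = np.zeros(size, dtype=np.int32)
        data = np.zeros([size, 4], dtype=np.int32)

        # In resdata the C routine fills these buffers in place; here we fill
        # them through the same ctypes pointers to show the mechanism.
        indx_ptr = indx.ctypes.data_as(ctypes.POINTER(ctypes.c_int32))
        data_ptr = data.ctypes.data_as(ctypes.POINTER(ctypes.c_int32))
        for g in range(size):
            indx_ptr[g] = g               # global cell index
            data_ptr[4 * g + 3] = 1       # mark the cell active

        return pd.DataFrame(data=data, index=indx, columns=["i", "j", "k", "active"])

    print(build_index_frame(4))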
33 changes: 17 additions & 16 deletions python/resdata/grid/rd_grid_generator.py
@@ -1,4 +1,5 @@
import itertools, numpy
import itertools
import numpy as np
from math import sqrt

from resdata import ResdataPrototype
@@ -34,11 +35,11 @@ def pre_mapaxes_translation(translation, mapaxes):

x, y, z = translation

unit_y = numpy.array((mapaxes[0] - mapaxes[2], mapaxes[1] - mapaxes[3]))
unit_y /= sqrt(numpy.sum(unit_y * unit_y))
unit_y = np.array((mapaxes[0] - mapaxes[2], mapaxes[1] - mapaxes[3]))
unit_y /= sqrt(np.sum(unit_y * unit_y))

unit_x = numpy.array((mapaxes[4] - mapaxes[2], mapaxes[5] - mapaxes[3]))
unit_x /= sqrt(numpy.sum(unit_x * unit_x))
unit_x = np.array((mapaxes[4] - mapaxes[2], mapaxes[5] - mapaxes[3]))
unit_x /= sqrt(np.sum(unit_x * unit_x))

det = 1.0 / (unit_x[0] * unit_y[1] - unit_x[1] * unit_y[0])

@@ -384,24 +385,24 @@ def assert_zcorn(cls, nx, ny, nz, zcorn, twisted_check=True):

@classmethod
def __scale_coord(cls, coord, scale, lower_center):
coord = numpy.array(
coord = np.array(
[list(map(float, coord[i : i + 6 :])) for i in range(0, len(coord), 6)]
)
origo = numpy.array(3 * [0.0] + list(lower_center) + [0])
scale = numpy.array(3 * [1.0] + 2 * [scale] + [1])
origo = np.array(3 * [0.0] + list(lower_center) + [0])
scale = np.array(3 * [1.0] + 2 * [scale] + [1])
coord = scale * (coord - origo) + origo
return coord.flatten().tolist()

@classmethod
def __misalign_coord(cls, coord, dims, dV):
nx, ny, nz = dims

coord = numpy.array(
coord = np.array(
[list(map(float, coord[i : i + 6 :])) for i in range(0, len(coord), 6)]
)

tf = lambda i, j: 1.0 / 2 if abs(i) + abs(j) <= 1 else 0.25
adjustment = numpy.array(
adjustment = np.array(
[
(0, 0, 0, i * tf(i, j) * dV[0], j * tf(i, j) * dV[1], 0)
for i, j in itertools.product([-1, 0, 1], repeat=2)
@@ -421,11 +422,11 @@ def __misalign_coord(cls, coord, dims, dV):

@classmethod
def __rotate_coord(cls, coord, lower_center):
coord = numpy.array(
coord = np.array(
[list(map(float, coord[i : i + 6 :])) for i in range(0, len(coord), 6)]
)

origo = numpy.array(3 * [0.0] + list(lower_center) + [0])
origo = np.array(3 * [0.0] + list(lower_center) + [0])
coord -= origo

for c in coord:
Expand All @@ -436,10 +437,10 @@ def __rotate_coord(cls, coord, lower_center):

@classmethod
def __translate_lower_coord(cls, coord, translation):
coord = numpy.array(
coord = np.array(
[list(map(float, coord[i : i + 6 :])) for i in range(0, len(coord), 6)]
)
translation = numpy.array(3 * [0.0] + list(translation))
translation = np.array(3 * [0.0] + list(translation))

coord = coord + translation
return coord.flatten().tolist()
@@ -572,10 +573,10 @@ def extract_actnum(cls, dims, actnum, ijk_bounds):

@classmethod
def __translate_coord(cls, coord, translation):
coord = numpy.array(
coord = np.array(
[list(map(float, coord[i : i + 6 :])) for i in range(0, len(coord), 6)]
)
translation = numpy.array(list(translation) + list(translation))
translation = np.array(list(translation) + list(translation))

coord = coord + translation
return construct_floatKW("COORD", coord.flatten().tolist())
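In pre_mapaxes_translation above, the two MAPAXES direction vectors are normalized with np.array and np.sum before the basis is inverted. A standalone sketch of just that step, under the six-value layout implied by the indexing in the diff (mapaxes_units is a hypothetical helper, not part of resdata):

    from math import sqrt

    import numpy as np

    def mapaxes_units(mapaxes):
        # mapaxes holds three 2D points flattened to six floats: a point on the
        # grid Y axis, the origin, and a point on the grid X axis.
        unit_y = np.array((mapaxes[0] - mapaxes[2], mapaxes[1] - mapaxes[3]), dtype=float)
        unit_y /= sqrt(np.sum(unit_y * unit_y))

        unit_x = np.array((mapaxes[4] - mapaxes[2], mapaxes[5] - mapaxes[3]), dtype=float)
        unit_x /= sqrt(np.sum(unit_x * unit_x))

        # Reciprocal determinant used to invert the (unit_x, unit_y) basis.
        det = 1.0 / (unit_x[0] * unit_y[1] - unit_x[1] * unit_y[0])
        return unit_x, unit_y, det

    print(mapaxes_units((0.0, 100.0, 0.0, 0.0, 100.0, 0.0)))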
18 changes: 9 additions & 9 deletions python/resdata/resfile/rd_kw.py
@@ -26,7 +26,7 @@
import ctypes
import warnings

import numpy
import numpy as np
from cwrap import CFILE, BaseCClass
from resdata import ResdataPrototype, ResDataType, ResdataTypeEnum, ResdataUtil
from resdata.util.util import monkey_the_camel
@@ -415,15 +415,15 @@ def __private_init(self):

if self.data_type.is_int():
self.data_ptr = self._int_ptr()
self.dtype = numpy.int32
self.dtype = np.int32
self.str_fmt = "%8d"
elif self.data_type.is_float():
self.data_ptr = self._float_ptr()
self.dtype = numpy.float32
self.dtype = np.float32
self.str_fmt = "%13.4f"
elif self.data_type.is_double():
self.data_ptr = self._double_ptr()
self.dtype = numpy.float64
self.dtype = np.float64
self.str_fmt = "%13.4f"
else:
# Iteration not supported for CHAR / BOOL
@@ -1078,19 +1078,19 @@ def numpy_view(self):
other.
"""

if self.dtype is numpy.float64:
if self.dtype is np.float64:
ct = ctypes.c_double
elif self.dtype is numpy.float32:
elif self.dtype is np.float32:
ct = ctypes.c_float
elif self.dtype is numpy.int32:
elif self.dtype is np.int32:
ct = ctypes.c_int
else:
raise ValueError(
"Invalid type - numpy array only valid for int/float/double"
)

ap = ctypes.cast(self.data_ptr, ctypes.POINTER(ct * len(self)))
return numpy.frombuffer(ap.contents, dtype=self.dtype)
return np.frombuffer(ap.contents, dtype=self.dtype)

def numpy_copy(self):
"""Will return a numpy array which contains a copy of the ResdataKW data.
Expand All @@ -1102,7 +1102,7 @@ def numpy_copy(self):
shared.
"""
view = self.numpyView()
return numpy.copy(view)
return np.copy(view)

def fwrite(self, fortio):
self._fwrite(fortio)
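numpy_view above wraps the keyword's C buffer in a zero-copy array using ctypes.cast plus np.frombuffer, and numpy_copy detaches from it with np.copy. A hedged, self-contained sketch of the same mechanism against a plain ctypes array (view_of_c_buffer is a hypothetical helper, not part of resdata):

    import ctypes

    import numpy as np

    def view_of_c_buffer(ptr, length, dtype):
        # Map the supported dtypes to their ctypes counterparts, as in __private_init.
        ct = {np.int32: ctypes.c_int, np.float32: ctypes.c_float, np.float64: ctypes.c_double}[dtype]
        ap = ctypes.cast(ptr, ctypes.POINTER(ct * length))
        return np.frombuffer(ap.contents, dtype=dtype)  # shares memory with the buffer

    buf = (ctypes.c_double * 4)(1.0, 2.0, 3.0, 4.0)
    view = view_of_c_buffer(buf, 4, np.float64)
    view[0] = 10.0          # writes through to buf, like ResdataKW.numpy_view()
    copy = np.copy(view)    # detached copy, like ResdataKW.numpy_copy()
    print(buf[0], copy[0])  # 10.0 10.0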
26 changes: 13 additions & 13 deletions python/resdata/summary/rd_sum.py
@@ -7,11 +7,11 @@
"""

import warnings
import numpy
import numpy as np
import datetime
import os.path
import ctypes
import pandas
import pandas as pd
import re
from typing import Sequence, List, Tuple, Optional

@@ -463,13 +463,13 @@ def get_values(self, key, report_only=False):
key_index = self._get_general_var_index(key)
if report_only:
index_list = self.report_index_list()
values = numpy.zeros(len(index_list))
values = np.zeros(len(index_list))
for i in range(len(index_list)):
time_index = index_list[i]
values[i] = self._iiget(time_index, key_index)
else:
length = self._data_length()
values = numpy.zeros(length)
values = np.zeros(length)
for i in range(length):
values[i] = self._iiget(i, key_index)

@@ -516,14 +516,14 @@ def numpy_vector(self, key, time_index=None, report_only=False):
raise ValueError("Can not suuply both time_index and report_only=True")

if time_index is None:
np_vector = numpy.zeros(len(self))
np_vector = np.zeros(len(self))
self._init_numpy_vector(
key, np_vector.ctypes.data_as(ctypes.POINTER(ctypes.c_double))
)
return np_vector
else:
time_vector = self._make_time_vector(time_index)
np_vector = numpy.zeros(len(time_vector))
np_vector = np.zeros(len(time_vector))
self._init_numpy_vector_interp(
key,
time_vector,
@@ -536,7 +536,7 @@ def numpy_dates(self):
"""
Will return numpy vector of numpy.datetime64() values for all the simulated timepoints.
"""
np_dates = numpy.zeros(len(self), dtype="datetime64[ms]")
np_dates = np.zeros(len(self), dtype="datetime64[ms]")
self._init_numpy_datetime64(
np_dates.ctypes.data_as(ctypes.POINTER(ctypes.c_int64)), 1000
)
@@ -601,21 +601,21 @@ def pandas_frame(self, time_index=None, column_keys=None):

if time_index is None:
time_index = self.dates
data = numpy.zeros([len(time_index), len(keywords)])
data = np.zeros([len(time_index), len(keywords)])
Summary._init_pandas_frame(
self, keywords, data.ctypes.data_as(ctypes.POINTER(ctypes.c_double))
)
else:
time_points = self._make_time_vector(time_index)
data = numpy.zeros([len(time_points), len(keywords)])
data = np.zeros([len(time_points), len(keywords)])
Summary._init_pandas_frame_interp(
self,
keywords,
time_points,
data.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
)

frame = pandas.DataFrame(index=time_index, columns=list(keywords), data=data)
frame = pd.DataFrame(index=time_index, columns=list(keywords), data=data)
return frame

@staticmethod
@@ -716,7 +716,7 @@ def from_pandas(cls, case, frame, dims=None, headers=None):
# Avoid Pandas or numpy timestamps, to avoid Pandas attempting to create
# timestamp64[ns] indices (which can't go beyond year 2262)
# https://github.com/pandas-dev/pandas/issues/39727
if isinstance(start_time, pandas.Timestamp):
if isinstance(start_time, pd.Timestamp):
start_time = start_time.to_pydatetime()

var_list = []
@@ -1093,7 +1093,7 @@ def get_interp_vector(self, key, days_list=None, date_list=None):
if date_list:
raise ValueError("Must supply either days_list or date_list")
else:
vector = numpy.zeros(len(days_list))
vector = np.zeros(len(days_list))
sim_length = self.sim_length
sim_start = self.first_day
index = 0
Expand All @@ -1106,7 +1106,7 @@ def get_interp_vector(self, key, days_list=None, date_list=None):
elif date_list:
start_time = self.data_start
end_time = self.end_date
vector = numpy.zeros(len(date_list))
vector = np.zeros(len(date_list))
index = 0

for date in date_list:
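pandas_frame above allocates a (timesteps x keywords) float64 matrix, lets the C layer fill it through a ctypes double pointer, and then wraps it in a pd.DataFrame indexed by the report dates. A minimal sketch of that final wrapping step (frame_from_matrix and the random fill are stand-ins for Summary._init_pandas_frame):

    import numpy as np
    import pandas as pd

    def frame_from_matrix(dates, keywords):
        # Allocate the (time x keyword) matrix that the C layer would fill in place
        # through data.ctypes.data_as(ctypes.POINTER(ctypes.c_double)).
        data = np.zeros([len(dates), len(keywords)])
        data[:] = np.random.default_rng(0).random(data.shape)  # placeholder fill
        return pd.DataFrame(index=dates, columns=list(keywords), data=data)

    dates = pd.date_range("2024-08-26", periods=3, freq="D")
    print(frame_from_matrix(dates, ["FOPR", "FOPT"]))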