Merge pull request #454 from juripetersen/wayang-python-api
Add TensorFlow operators to Python API
zkaoudi authored Aug 6, 2024
2 parents 97bed9f + 3526e7a commit 3fa3c38
Showing 50 changed files with 1,345 additions and 322 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/backend.yml
@@ -51,7 +51,7 @@ jobs:
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven
- name: Run Unit tests
run: ./mvnw clean verify -B -Dmaven.test.skip=false -Dwayang.configuration=file:$(pwd)/tools/test/config/wayang.properties
- name: Build And Install
run: ./mvnw clean install -B -Dmaven.test.skip=true
- name: Run Unit tests
run: ./mvnw clean verify -B -Dmaven.test.skip=false -Dwayang.configuration=file:$(pwd)/tools/test/config/wayang.properties
1 change: 1 addition & 0 deletions python/setup.cfg
@@ -39,6 +39,7 @@ python_requires = >=3.6
install_requires =
cloudpickle ==3.0.0
requests ==2.31.0
numpy ==1.19.5

tests_require =
unitest ==1.3.5
16 changes: 16 additions & 0 deletions python/src/pywy/basic/__init__.py
@@ -0,0 +1,16 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
16 changes: 16 additions & 0 deletions python/src/pywy/basic/model/__init__.py
@@ -0,0 +1,16 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
@@ -14,24 +14,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pywy.basic.model.ops import Op

import unittest
#from typing import Tuple, Callable, Iterable
from pywy.dataquanta import WayangContext
from unittest.mock import Mock
from pywy.platforms.java import JavaPlugin
from pywy.platforms.spark import SparkPlugin

class TestPlanToJson(unittest.TestCase):
def test_to_json(self):
ctx = WayangContext() \
.register({JavaPlugin, SparkPlugin}) \
.textfile("file:///var/www/html/data/in.txt") \
.map(lambda x: int(x)) \
.map(lambda x: int(x) + 1) \
.filter(lambda x: int(x) < 12) \
.store_textfile("file:///var/www/html/data/out-python.txt")
self.assertEqual(True, True)
class Model:
pass

if __name__ == "__main__":
unittest.main()

class DLModel(Model):
def __init__(self, out: Op):
self.out = out

def get_out(self):
return self.out
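
For orientation, a minimal usage sketch (not part of this commit): DLModel simply records the terminal Op of an operator graph so that later stages can traverse the graph from its output. The module path for DLModel is not shown in this diff and is assumed below; Input comes from the ops module added next.

from pywy.basic.model.models import DLModel  # assumed module path; not shown in this diff
from pywy.basic.model.ops import Input

# Wrap the output node of a (here trivial) operator graph in a DLModel.
out = Input(opType=Input.Type.PREDICTED)
model = DLModel(out)
assert model.get_out() is out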
235 changes: 235 additions & 0 deletions python/src/pywy/basic/model/ops.py
@@ -0,0 +1,235 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from typing import List
from itertools import count


class Op:
CNT = count(0)

class DType:
ANY = 'ANY'
INT32 = 'INT32'
INT64 = 'INT64'
FLOAT32 = 'FLOAT32'
FLOAT64 = 'FLOAT64'
BYTE = 'BYTE'
INT16 = 'INT16'
BOOL = 'BOOL'

def __init__(self, dType: DType, name=None, opType=None):
if name is None:
self.name = self.__class__.__name__
else:
self.name = name
self.fromList: List[Op] = []
self.dType = dType
self.opType = opType

def get_name(self):
return self.name

def get_dType(self):
return self.dType

def get_fromList(self):
return self.fromList

def with_ops(self, *ops):
assert not self.fromList
assert len(ops) == self.inputs_required()
for op in ops:
assert self.name != op.name
self.fromList.extend(ops)
return self

def inputs_required(self):
pass

def to_dict(self):
output = {}
output['op'] = self.name
output['opType'] = self.opType
output['dType'] = self.dType
output['fromList'] = list(map(lambda child: child.to_dict(),self.fromList))
output["dim"] = None
output["labels"] = None
output["inFeatures"] = None
output["outFeatures"] = None
output["bias"] = None

if hasattr(self, "dim"):
output["dim"] = self.dim

if hasattr(self, "labels"):
output["labels"] = self.labels

if hasattr(self, "inFeatures"):
output["inFeatures"] = self.inFeatures

if hasattr(self, "outFeatures"):
output["outFeatures"] = self.outFeatures

if hasattr(self, "bias"):
output["bias"] = self.bias

return output



class ArgMax(Op):
def __init__(self, dim, name=None):
super().__init__(Op.DType.INT32, name)
self.dim = dim

def get_dim(self):
return self.dim

def inputs_required(self):
return 1


class Cast(Op):
def __init__(self, dType, name=None):
super().__init__(dType, name)

def inputs_required(self):
return 1


class Eq(Op):
def __init__(self, name=None):
super().__init__(Op.DType.BOOL, name)

def inputs_required(self):
return 2


class Input(Op):
class Type:
FEATURES = "..FEATURES.."
LABEL = "..LABEL.."
PREDICTED = "..PREDICTED.."

def __init__(self, name):
self.name = name

def get_name(self):
return self.name

def __init__(self, opType=None, dType=Op.DType.FLOAT32, name=None):
if opType is not None:
super().__init__(dType=dType, opType=opType)
else:
super().__init__(dType=dType, name=name)

def inputs_required(self):
return 0


class Mean(Op):
def __init__(self, dim, name=None):
super().__init__(Op.DType.FLOAT32, name)
self.dim = dim

def get_dim(self):
return self.dim

def get_dType(self):
if self.fromList and self.fromList[0].get_dType() == Op.DType.FLOAT64:
return Op.DType.FLOAT64
return Op.DType.FLOAT32

def inputs_required(self):
return 1


class CrossEntropyLoss(Op):
def __init__(self, labels, name=None):
super().__init__(Op.DType.FLOAT32, name)
self.labels = labels

def get_labels(self):
return self.labels

def get_dType(self):
if self.fromList and self.fromList[0].get_dType() == Op.DType.FLOAT64:
return Op.DType.FLOAT64
return Op.DType.FLOAT32

def inputs_required(self):
return 2


class Linear(Op):
def __init__(self, inFeatures, outFeatures, bias, name=None, dType=Op.DType.FLOAT32):
super().__init__(dType, name)
self.inFeatures = inFeatures
self.outFeatures = outFeatures
self.bias = bias

def get_in_features(self):
return self.inFeatures

def get_out_features(self):
return self.outFeatures

def get_bias(self):
return self.bias

def inputs_required(self):
return 1


class ReLU(Op):
def __init__(self, name=None):
super().__init__(Op.DType.FLOAT32, name)

def get_dType(self):
if self.fromList:
return self.fromList[0].get_dType()
return Op.DType.FLOAT32

def inputs_required(self):
return 1


class Sigmoid(Op):
def __init__(self, name=None):
super().__init__(Op.DType.FLOAT32, name)

def get_dType(self):
if self.fromList and self.fromList[0].get_dType() == Op.DType.FLOAT64:
return Op.DType.FLOAT64
return Op.DType.FLOAT32

def inputs_required(self):
return 1


class Softmax(Op):
def __init__(self, name=None):
super().__init__(Op.DType.FLOAT32, name)

def get_dType(self):
if self.fromList and self.fromList[0].get_dType() == Op.DType.FLOAT64:
return Op.DType.FLOAT64
return Op.DType.FLOAT32

def inputs_required(self):
return 1
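
A usage sketch (not part of this commit) of how these operator classes compose into a small graph and how to_dict() serializes it; the layer sizes and the CrossEntropyLoss label count are arbitrary placeholders. Op names default to the class name, so an explicit name is given where two ops of the same class appear.

from pywy.basic.model.ops import (
    ArgMax, Cast, CrossEntropyLoss, Input, Linear, Op, Sigmoid, Softmax,
)

# Feature and label placeholders for the graph.
features = Input(opType=Input.Type.FEATURES)
labels = Input(opType=Input.Type.LABEL, dType=Op.DType.INT32)

# A tiny feed-forward classifier: Linear -> Sigmoid -> Linear -> Softmax.
hidden = Sigmoid().with_ops(Linear(4, 32, True).with_ops(features))
output = Softmax().with_ops(Linear(32, 3, True, name="Linear2").with_ops(hidden))

# The loss consumes the prediction and the labels (inputs_required() == 2).
loss = CrossEntropyLoss(3).with_ops(output, labels)

# Prediction path: take the arg-max class index and cast it to the label dtype.
prediction = Cast(Op.DType.INT32).with_ops(ArgMax(1).with_ops(output))

# to_dict() serializes an op and, recursively, everything in its fromList.
print(loss.to_dict())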
63 changes: 63 additions & 0 deletions python/src/pywy/basic/model/optimizer.py
@@ -0,0 +1,63 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#


from itertools import count

class Optimizer:
_CNT = count(0)

def __init__(self, learningRate, name=None):
self.learningRate = learningRate
if name is not None:
self.name = name
else:
self.name = self.__class__.__name__

def get_name(self):
return self.name

def get_learning_rate(self):
return self.learningRate

def to_dict(self):
return { \
"name": self.name, \
"learningRate": self.learningRate, \
}


class Adam(Optimizer):
def __init__(self, learningRate, betaOne=0.9, betaTwo=0.999, epsilon=1e-8, name=None):
super().__init__(learningRate, name)
self.betaOne = betaOne
self.betaTwo = betaTwo
self.epsilon = epsilon

def get_beta_one(self):
return self.betaOne

def get_beta_two(self):
return self.betaTwo

def get_epsilon(self):
return self.epsilon


class GradientDescent(Optimizer):
def __init__(self, learningRate, name=None):
super().__init__(learningRate, name)
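
A brief sketch (not part of this commit) of constructing the optimizers and serializing them; the hyperparameter values are arbitrary. Note that the base-class to_dict() only emits name and learningRate, so Adam's beta and epsilon values do not appear in the serialized form.

from pywy.basic.model.optimizer import Adam, GradientDescent

adam = Adam(learningRate=0.001)                     # betaOne=0.9, betaTwo=0.999, epsilon=1e-8 by default
sgd = GradientDescent(learningRate=0.1, name="sgd")

print(adam.to_dict())   # {'name': 'Adam', 'learningRate': 0.001}
print(sgd.to_dict())    # {'name': 'sgd', 'learningRate': 0.1}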