@@ -26,189 +26,201 @@ import org.apache.spark.sql.types.{DataType, Metadata, StructField, StructType}
26
26
27
27
28
28
/**
 * A Spark [[DataType]] that additionally carries the JVM class it was
 * derived from and whether values of the type may be null.
 *
 * Implementations wrap an existing Spark type ([[dt]]) rather than
 * re-implementing it.
 */
trait DataTypeWithClass {
  /** The underlying Spark SQL data type being wrapped. */
  val dt: DataType

  /** The JVM class this data type was derived from. */
  val cls: Class[_]

  /** Whether values of this type may be null. */
  val nullable: Boolean
}
33
33
34
34
/** Marker trait for wrappers around complex (non-primitive) Spark data types. */
trait ComplexWrapper extends DataTypeWithClass
35
35
36
/**
 * A [[StructType]] that also remembers the JVM class it was derived from.
 *
 * Every StructType operation is forwarded to the wrapped [[dt]]; this class
 * adds no behavior of its own beyond carrying [[cls]] and [[nullable]].
 *
 * @param dt       the wrapped struct type all calls are delegated to
 * @param cls      the JVM class this struct type was derived from
 * @param nullable whether values of this type may be null (defaults to true)
 */
class KDataTypeWrapper(
    val dt: StructType,
    val cls: Class[_],
    val nullable: Boolean = true
) extends StructType with ComplexWrapper {

  override def fieldNames: Array[String] = dt.fieldNames

  override def names: Array[String] = dt.names

  override def equals(that: Any): Boolean = dt.equals(that)

  override def hashCode(): Int = dt.hashCode()

  override def add(field: StructField): StructType = dt.add(field)

  override def add(name: String, dataType: DataType): StructType = dt.add(name, dataType)

  override def add(name: String, dataType: DataType, nullable: Boolean): StructType =
    dt.add(name, dataType, nullable)

  override def add(name: String, dataType: DataType, nullable: Boolean, metadata: Metadata): StructType =
    dt.add(name, dataType, nullable, metadata)

  override def add(name: String, dataType: DataType, nullable: Boolean, comment: String): StructType =
    dt.add(name, dataType, nullable, comment)

  override def add(name: String, dataType: String): StructType = dt.add(name, dataType)

  override def add(name: String, dataType: String, nullable: Boolean): StructType =
    dt.add(name, dataType, nullable)

  override def add(name: String, dataType: String, nullable: Boolean, metadata: Metadata): StructType =
    dt.add(name, dataType, nullable, metadata)

  override def add(name: String, dataType: String, nullable: Boolean, comment: String): StructType =
    dt.add(name, dataType, nullable, comment)

  override def apply(name: String): StructField = dt.apply(name)

  override def apply(names: Set[String]): StructType = dt.apply(names)

  override def fieldIndex(name: String): Int = dt.fieldIndex(name)

  override private[sql] def getFieldIndex(name: String) = dt.getFieldIndex(name)

  // NOTE(review): deliberately not marked `override` in the original — presumably
  // because findNestedField does not exist in all supported Spark versions; confirm.
  private[sql] def findNestedField(fieldNames: Seq[String], includeCollections: Boolean, resolver: Resolver) =
    dt.findNestedField(fieldNames, includeCollections, resolver)

  override private[sql] def buildFormattedString(prefix: String, stringConcat: StringUtils.StringConcat, maxDepth: Int): Unit =
    dt.buildFormattedString(prefix, stringConcat, maxDepth)

  override protected[sql] def toAttributes: Seq[AttributeReference] = dt.toAttributes

  override def treeString: String = dt.treeString

  override def treeString(maxDepth: Int): String = dt.treeString(maxDepth)

  override def printTreeString(): Unit = dt.printTreeString()

  private[sql] override def jsonValue = dt.jsonValue

  override def apply(fieldIndex: Int): StructField = dt.apply(fieldIndex)

  override def length: Int = dt.length

  override def iterator: Iterator[StructField] = dt.iterator

  override def defaultSize: Int = dt.defaultSize

  override def simpleString: String = dt.simpleString

  override def catalogString: String = dt.catalogString

  override def sql: String = dt.sql

  override def toDDL: String = dt.toDDL

  private[sql] override def simpleString(maxNumberFields: Int) = dt.simpleString(maxNumberFields)

  override private[sql] def merge(that: StructType) = dt.merge(that)

  private[spark] override def asNullable = dt.asNullable

  private[spark] override def existsRecursively(f: DataType => Boolean) = dt.existsRecursively(f)

  override private[sql] lazy val interpretedOrdering = dt.interpretedOrdering

  override def toString = s"KDataTypeWrapper(dt=$dt, cls=$cls, nullable=$nullable)"
}
115
124
116
/**
 * Wraps a complex (non-struct) Spark [[DataType]] together with the JVM class
 * it was derived from.
 *
 * Every DataType operation is delegated to the wrapped [[dt]].
 *
 * @param dt       the wrapped data type all calls are delegated to
 * @param cls      the JVM class this data type was derived from
 * @param nullable whether values of this type may be null
 */
case class KComplexTypeWrapper(dt: DataType, cls: Class[_], nullable: Boolean) extends DataType with ComplexWrapper {

  override private[sql] def unapply(e: Expression) = dt.unapply(e)

  override def typeName: String = dt.typeName

  override private[sql] def jsonValue = dt.jsonValue

  override def json: String = dt.json

  override def prettyJson: String = dt.prettyJson

  override def simpleString: String = dt.simpleString

  override def catalogString: String = dt.catalogString

  override private[sql] def simpleString(maxNumberFields: Int) = dt.simpleString(maxNumberFields)

  override def sql: String = dt.sql

  override private[spark] def sameType(other: DataType) = dt.sameType(other)

  override private[spark] def existsRecursively(f: DataType => Boolean) = dt.existsRecursively(f)

  private[sql] override def defaultConcreteType = dt.defaultConcreteType

  private[sql] override def acceptsType(other: DataType) = dt.acceptsType(other)

  override def defaultSize: Int = dt.defaultSize

  override private[spark] def asNullable = dt.asNullable
}
148
158
149
/**
 * Wraps a simple (primitive-like) Spark [[DataType]] together with the JVM
 * class it was derived from.
 *
 * Every DataType operation is delegated to the wrapped [[dt]].
 *
 * @param dt       the wrapped data type all calls are delegated to
 * @param cls      the JVM class this data type was derived from
 * @param nullable whether values of this type may be null
 */
case class KSimpleTypeWrapper(dt: DataType, cls: Class[_], nullable: Boolean) extends DataType with DataTypeWithClass {

  override private[sql] def unapply(e: Expression) = dt.unapply(e)

  override def typeName: String = dt.typeName

  override private[sql] def jsonValue = dt.jsonValue

  override def json: String = dt.json

  override def prettyJson: String = dt.prettyJson

  override def simpleString: String = dt.simpleString

  override def catalogString: String = dt.catalogString

  override private[sql] def simpleString(maxNumberFields: Int) = dt.simpleString(maxNumberFields)

  override def sql: String = dt.sql

  override private[spark] def sameType(other: DataType) = dt.sameType(other)

  override private[spark] def existsRecursively(f: DataType => Boolean) = dt.existsRecursively(f)

  private[sql] override def defaultConcreteType = dt.defaultConcreteType

  private[sql] override def acceptsType(other: DataType) = dt.acceptsType(other)

  override def defaultSize: Int = dt.defaultSize

  override private[spark] def asNullable = dt.asNullable
}
180
190
181
191
/**
 * A [[StructField]] that carries the name of the Kotlin/Java getter it was
 * derived from, delegating all StructField behavior to [[delegate]].
 *
 * @param getterName the accessor name this field was derived from
 * @param delegate   the struct field all calls are delegated to
 */
class KStructField(val getterName: String, val delegate: StructField) extends StructField {

  override private[sql] def buildFormattedString(prefix: String, stringConcat: StringUtils.StringConcat, maxDepth: Int): Unit =
    delegate.buildFormattedString(prefix, stringConcat, maxDepth)

  override def toString(): String = delegate.toString()

  override private[sql] def jsonValue = delegate.jsonValue

  override def withComment(comment: String): StructField = delegate.withComment(comment)

  override def getComment(): Option[String] = delegate.getComment()

  override def toDDL: String = delegate.toDDL

  override def productElement(n: Int): Any = delegate.productElement(n)

  override def productArity: Int = delegate.productArity

  override def productIterator: Iterator[Any] = delegate.productIterator

  override def productPrefix: String = delegate.productPrefix

  override val dataType: DataType = delegate.dataType

  override def canEqual(that: Any): Boolean = delegate.canEqual(that)

  override val metadata: Metadata = delegate.metadata
  override val name: String = delegate.name
  override val nullable: Boolean = delegate.nullable
}
210
222
211
223
/** Interop helpers for callers that only have Java collections. */
object helpme {

  /**
   * Converts a Java [[java.util.List]] to a Scala [[Seq]].
   *
   * Element type is erased to `_` (values come back as `AnyRef` via
   * `toArray`); preserves element order.
   */
  def listToSeq(i: java.util.List[_]): Seq[_] = Seq(i.toArray: _*)
}
0 commit comments