Skip to content
This repository has been archived by the owner on Jun 27, 2019. It is now read-only.

Commit

Permalink
Remove Typos
Browse files Browse the repository at this point in the history
Signed-off-by: nirmalsinghania2008 <[email protected]>
  • Loading branch information
nirmalsinghania2008 authored and nirmalsinghania2008 committed Mar 31, 2016
1 parent 754efb5 commit d03c8bb
Show file tree
Hide file tree
Showing 4 changed files with 46 additions and 46 deletions.
28 changes: 14 additions & 14 deletions sml/include/sml.h
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ extern "C" {
* the person leaves. For that, this person has a presence sensor.
*
* Choosing engines and creating variables is straight-forward after
* the solution is modeled. Let's say fuzzy was the choosen one.
* the solution is modeled. Let's say fuzzy was the chosen one.
* Registering callbacks is simple as well:
*
* @dontinclude example_doc.c
Expand Down Expand Up @@ -105,7 +105,7 @@ extern "C" {
 * depending on your product. It may be synchronous or asynchronous.
*
* To keep it simple, yet illustrative, we're going to simulate the values
* in a function read_state_cb() Variable sensor_state represents presense
* in a function read_state_cb() Variable sensor_state represents presence
* sensor reading, switch_state represents light state.
*
 * This variable will be global since it's going to be used by the callback
Expand Down Expand Up @@ -154,7 +154,7 @@ extern "C" {
* - If you want to control a light and an air-conditioning and they
* are independent of each other. Create two SML objects,
* one will control the light and another one the air-conditioning.
* - Try to avoid adding unnecessary inputs/outpus to SML (or forget
* - Try to avoid adding unnecessary inputs/outputs to SML (or forget
 * to add relevant inputs/outputs), this may lead to poor
* predictions.
* - Test both engines and check which one has the best results
Expand All @@ -169,8 +169,8 @@ extern "C" {
/**
* @struct sml_object
*
* Instance of the choosen machine learning engine, it may be created with
* sml_fuzzy_new() or sml_ann_new() and shold be deleted after usage with
* Instance of the chosen machine learning engine, it may be created with
* sml_fuzzy_new() or sml_ann_new() and should be deleted after usage with
* sml_free().
*/
struct sml_object;
Expand Down Expand Up @@ -236,7 +236,7 @@ bool sml_load_fll_file(struct sml_object *sml, const char *filename);
void sml_free(struct sml_object *sml);

/**
* @brief Register a read calblack.
 * @brief Register a read callback.
*
* It should be used to set a callback function to read variables
* values. This callback must return true if it was able to
Expand Down Expand Up @@ -303,7 +303,7 @@ int sml_process(struct sml_object *sml);
/**
* @brief Make a prediction based on the most recent observations.
*
* This is usefull for making predictions without the normal SML flow,
* This is useful for making predictions without the normal SML flow,
* without a mainloop and a registered ::sml_change_cb. Take a look
* in the following example:
*
Expand Down Expand Up @@ -475,7 +475,7 @@ struct sml_variables_list *sml_get_output_list(struct sml_object *sml);
* New input variables start with NAN set as value.
*
* @param sml The ::sml_object object.
* @param name The variable name. If lenght is greater
* @param name The variable name. If length is greater
* than ::SML_VARIABLE_NAME_MAX_LEN variable creation will fail.
* @return ::sml_variable on success.
* @return @c NULL on failure.
Expand All @@ -489,7 +489,7 @@ struct sml_variable *sml_new_input(struct sml_object *sml, const char *name);
* the value set on NAN will be used.
*
* @param sml The ::sml_object object.
* @param name The variable name. If lenght is greater
* @param name The variable name. If length is greater
* than ::SML_VARIABLE_NAME_MAX_LEN variable creation will fail.
* @return ::sml_variable on success.
* @return @c NULL on failure.
Expand Down Expand Up @@ -522,7 +522,7 @@ struct sml_variable *sml_get_output(struct sml_object *sml, const char *name);
* @param sml The ::sml_object object.
* @param sml_variable The ::sml_variable
* @param value The desired value.
* @return @c true on succcess.
* @return @c true on success.
* @return @c false on failure.
*/
bool sml_variable_set_value(struct sml_object *sml, struct sml_variable *sml_variable, float value);
Expand All @@ -532,7 +532,7 @@ bool sml_variable_set_value(struct sml_object *sml, struct sml_variable *sml_var
*
* @param sml The ::sml_object object.
* @param sml_variable The ::sml_variable
* @return @c true on succcess.
* @return @c true on success.
* @return @c false on failure.
*/
float sml_variable_get_value(struct sml_object *sml, struct sml_variable *sml_variable);
Expand Down Expand Up @@ -569,7 +569,7 @@ int sml_variable_set_enabled(struct sml_object *sml, struct sml_variable *variab
*
* @param sml The ::sml_object object.
* @param variable The ::sml_variable
* @return @c true on succcess.
* @return @c true on success.
* @return @c false on failure.
*/
bool sml_variable_is_enabled(struct sml_object *sml, struct sml_variable *variable);
Expand All @@ -583,7 +583,7 @@ bool sml_variable_is_enabled(struct sml_object *sml, struct sml_variable *variab
*
* @param sml The ::sml_object object.
* @param variable The ::sml_variable to be removed
* @return @c true on succcess.
* @return @c true on success.
* @return @c false on failure.
*/
bool sml_remove_variable(struct sml_object *sml, struct sml_variable *variable);
Expand All @@ -604,7 +604,7 @@ uint16_t sml_variables_list_get_length(struct sml_object *sml, struct sml_variab
* @param list The ::sml_variables_list.
* @param index The list index.
* @return ::sml_variable on success.
* @return @c NULL on faiulre..
 * @return @c NULL on failure.
*/
struct sml_variable *sml_variables_list_index(struct sml_object *sml, struct sml_variables_list *list, uint16_t index);

Expand Down
24 changes: 12 additions & 12 deletions sml/include/sml_ann.h
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,8 @@ extern "C" {
* A neural network consists in a set of neurons that are inter-connected
* and distributed in layers, usually three (Input, hidden and output layers).
* For every connection between neurons there is a weight associated to it,
* these weights are initialized randomly with values beetween -0.2 and 0.2 and
* adjusted during the traning phase, so the neural networkt
* these weights are initialized randomly with values between -0.2 and 0.2 and
* adjusted during the training phase, so the neural network
* output predict the right value.
*
* The neuron is the basic unit of the neural network and it's responsible for
Expand All @@ -60,7 +60,7 @@ extern "C" {
*
* Consider that Bob is present (input is 1) and Alice (input is 0) is not.
*
* The first step is to provide the Bob's and Alice's presence to the input nerons,
 * The first step is to provide Bob's and Alice's presence to the input neurons,
 * In a neural network the input neurons are special, because they do not apply the formula (1) to produce an output.
 * The output value from an input neuron is the input value itself, so in this case the
* N1 and N2 neurons will output 1 and 0 respectively.
Expand Down Expand Up @@ -95,9 +95,9 @@ extern "C" {
* @remark The values used in the neural network above
* (inputs, outputs, neuron weights) were chosen randomly.
*
* The example above uses two neurons in the hidden layer, however for some problem, two nerons is not good enough.
 * The example above uses two neurons in the hidden layer, however for some problems, two neurons are not good enough.
* There is no silver bullet about how many neurons one should use in the hidden layer, this number is obtained by trial and error.
* SML can handle this automatically, during the traning phase SML will automatically choose the neural network topology
* SML can handle this automatically, during the training phase SML will automatically choose the neural network topology
* (how many neurons the hidden layer must have), it will also decide which is the best activation function.
*
* The SML neural network engine has two methods of operation, these methods try
Expand All @@ -108,8 +108,8 @@ extern "C" {
 * that has learnt in the past and only accumulate recent memory. This happens due to the nature
* of their training. In order to reduce this problem the following methods were implemented.
*
* The first method is called pseudohearsal (the default one), in this method only one neural network is created
* and everytime it needs be retrained, random inputs are genereted and
* The first method is called pseudo-rehearsal (the default one), in this method only one neural network is created
 * and every time it needs to be retrained, random inputs are generated and
* feed to the network. The corresponding outputs are stored and used to train the neural network
* with the new collected data.
*
Expand Down Expand Up @@ -188,7 +188,7 @@ bool sml_is_ann(struct sml_object *sml);
bool sml_ann_supported(void);

/**
* @brief Set the neural network trainning algorithm.
* @brief Set the neural network training algorithm.
*
* The training algorithm is responsible for adjusting the neural network weights.
*
Expand All @@ -207,7 +207,7 @@ bool sml_ann_set_training_algorithm(struct sml_object *sml, enum sml_ann_trainin
* Activation functions resides inside the neurons and they are responsible for producing the
 * neuron output value. As choosing the correct activation functions may require a
* lot of trial and error tests, the SML uses an algorithm that tries to suit the best
* activiation functions for a given problem.
* activation functions for a given problem.
*
* @remark By default all ::sml_ann_activation_function are used as candidates.
*
Expand Down Expand Up @@ -244,7 +244,7 @@ bool sml_ann_set_max_neurons(struct sml_object *sml, unsigned int max_neurons);
* To do so, it will create (M * 4) neuron candidates (where M is
* the number of activation function candidates) and then N (where N is the number of candidate groups)
* candidate groups will be created, ending up with M*4*N candidate neurons.
* The only different beetwen these candidate groups is the initial weight values.
 * The only difference between these candidate groups is the initial weight values.
*
* @remark The default number of candidates is 6
*
Expand All @@ -259,7 +259,7 @@ bool sml_ann_set_max_neurons(struct sml_object *sml, unsigned int max_neurons);
bool sml_ann_set_candidate_groups(struct sml_object *sml, unsigned int candidate_groups);

/**
* @brief Set the neral network train epochs
* @brief Set the neural network train epochs
*
* The training epochs is used to know when to stop the training.
* If the desired error is never reached, the training phase will stop
Expand Down Expand Up @@ -329,7 +329,7 @@ bool sml_ann_set_cache_max_size(struct sml_object *sml, unsigned int max_size);
bool sml_ann_set_initial_required_observations(struct sml_object *sml, unsigned int required_observations);

/**
* @brief Set the pseudoreharsal strategy
* @brief Set the pseudorehearsal strategy
*
* For more information about the pseudorehearsal strategy look at the
* \ref Neural_Network_Engine_Introduction
Expand Down
26 changes: 13 additions & 13 deletions sml/include/sml_fuzzy.h
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ enum sml_fuzzy_snorm {

/**
* @enum sml_fuzzy_tnorm
* @brief TNorm rules are also known as conjuntion.
* @brief TNorm rules are also known as conjunction.
*
* @see ::sml_fuzzy_conjunction_set
*/
Expand All @@ -81,11 +81,11 @@ enum sml_fuzzy_tnorm {
enum sml_fuzzy_defuzzifier {
SML_FUZZY_DEFUZZIFIER_BISECTOR, /**< The point that divide the curve into two equal sub-regions */
SML_FUZZY_DEFUZZIFIER_CENTROID, /**< The center of the area under the curve */
SML_FUZZY_DEFUZZIFIER_LARGEST_OF_MAXIMUM, /**< The largest value of the maximun degrees of membership */
SML_FUZZY_DEFUZZIFIER_LARGEST_OF_MAXIMUM, /**< The largest value of the maximum degrees of membership */
SML_FUZZY_DEFUZZIFIER_MEAN_OF_MAXIMUM, /**< The mean of the maximum degrees of membership */
SML_FUZZY_DEFUZZIFIER_SMALLEST_OF_MAXIMUM, /**< The smallest value of the maximun degrees of membership */
SML_FUZZY_DEFUZZIFIER_WEIGHTED_AVERAGE, /**< The avarage of the activation degrees multipled by a weight*/
SML_FUZZY_DEFUZZIFIER_WEIGHTED_SUM, /**< The sum of the activation degrees multipled by a weight*/
SML_FUZZY_DEFUZZIFIER_WEIGHTED_AVERAGE, /**< The average of the activation degrees multiplied by a weight*/
SML_FUZZY_DEFUZZIFIER_WEIGHTED_SUM, /**< The sum of the activation degrees multiplied by a weight*/
}; /**< A fuzzy Defuzzifier type. */

/**
Expand Down Expand Up @@ -138,9 +138,9 @@ bool sml_fuzzy_supported(void);
/**
* @brief Set the conjunction fuzzy rule.
*
* The conjuction rule is equivalent to the boolean and operation.
* The conjunction rule is equivalent to the boolean and operation.
* Example:
* If the fuzzy engie encounters the following expression in a fuzzy rule, "... A and B ...".
* If the fuzzy engine encounters the following expression in a fuzzy rule, "... A and B ...".
* Given that A is 0.6 and B is 0.8 and the conjunction operator is ::SML_FUZZY_TNORM_MINIMUM,
* the result of the operation will be 0.6
*
Expand Down Expand Up @@ -172,7 +172,7 @@ bool sml_fuzzy_conjunction_set(struct sml_object *sml, enum sml_fuzzy_tnorm norm
* Values greater than @c 1 or lower than @c 0 are not accepted and rules with
* weight @c zero are always ignored.
*
* @remark The default weight treshold is 0.05
* @remark The default weight threshold is 0.05
*
* @param sml The ::sml_object object.
* @param weight_threshold The desired threshold.
Expand Down Expand Up @@ -231,7 +231,7 @@ bool sml_fuzzy_output_set_accumulation(struct sml_object *sml, struct sml_variab
* automatically create terms for this variable. Some properties are important
* help fuzzy engine to improve the quality of the created terms.
*
* Width is the width of each creted term. Other important properties are
* Width is the width of each created term. Other important properties are
* is_id, set by ::sml_fuzzy_variable_set_is_id and range (min and max), set by
* ::sml_variable_set_range.
*
Expand Down Expand Up @@ -351,13 +351,13 @@ bool sml_fuzzy_variable_get_is_id(struct sml_object *sml, struct sml_variable *s
struct sml_fuzzy_term *sml_fuzzy_variable_add_term_rectangle(struct sml_object *sml, struct sml_variable *variable, const char *name, float start, float end);

/**
* @brief Add a tirangle term for a variable.
* @brief Add a triangle term for a variable.
*
 * A triangle term is a mathematical function defined by 3 vertices (a, b, c).
* For X coordinates between vertex a and b, function value is
* obtained by the linear function connecting points (vertex_a; 0) to
 * (vertex_b; 1). In vertex_b, function value will be 1.0 and from vertex_b to vertex_c,
* function value is obtained by the linear function connecting opints (vertex_b; 1.0)
* function value is obtained by the linear function connecting points (vertex_b; 1.0)
* to (vertex_c; 0).
* For all other X values, the function value will be @c zero.
*
Expand All @@ -378,7 +378,7 @@ struct sml_fuzzy_term *sml_fuzzy_variable_add_term_triangle(struct sml_object *s
* @brief Add a cosine term for a variable.
*
* Cosine term value is obtained by the function value from a cosine function
* centered in X coordinate center and with width defined by width paramenter.
* centered in X coordinate center and with width defined by width parameter.
* The maximum value (1.0) of the cosine function is in X coordinate center.
*
* @remark The term name can not contain spaces!
Expand All @@ -397,7 +397,7 @@ struct sml_fuzzy_term *sml_fuzzy_variable_add_term_cosine(struct sml_object *sml
* @brief Add a gaussian term for a variable.
*
* Gaussian term value is obtained by the function value from a gaussian
* function defined by the parametes mean, standard_deviation. The maximum
* function defined by the parameters mean, standard_deviation. The maximum
* value in Y axis of the gaussian function is 1.0.
*
* @remark The term name can not contain spaces!
Expand Down Expand Up @@ -475,7 +475,7 @@ bool sml_fuzzy_variable_remove_term(struct sml_object *sml, struct sml_variable
*
* The rule simplification uses a heuristic to try to simplify the fuzzy rules.
 * For example, if the fuzzy engine is controlling a home Light using two variables
* (Weekday and TimeOfTheDay) we can ha ve the following rules:
* (Weekday and TimeOfTheDay) we can have the following rules:
* <BLOCKQUOTE>
* If Weekday is Sunday and TimeOfTheDay is Night then Light is On. <BR>
* If Weekday is Monday and TimeOfTheDay is Night then Light is On. <BR>
Expand Down
14 changes: 7 additions & 7 deletions sml/include/sml_log.h
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ extern "C" {
* @brief Log level types
*/
enum sml_log_level {
SML_LOG_LEVEL_DEBUG = 1 << 0, /**< Show debug messages. The debug messages will not be loged with SML is compiled in Release mode. */
 SML_LOG_LEVEL_DEBUG = 1 << 0, /**< Show debug messages. The debug messages will not be logged when SML is compiled in Release mode. */
SML_LOG_LEVEL_INFO = 1 << 1, /**< Show info messages. */
SML_LOG_LEVEL_WARNING = 1 << 2, /**< Show warning messages. */
SML_LOG_LEVEL_ERROR = 1 << 3, /**< Show error messages. */
Expand All @@ -59,7 +59,7 @@ typedef void (*sml_log_handler_cb)(enum sml_log_level level, const char *msg, vo
/**
* @brief Set a log handler.
*
* This function is usefull if one wants to log SML events in files
* This function is useful if one wants to log SML events in files
* or do not want to log ::SML_LOG_LEVEL_WARNING messages, for example.
* SML provides a default ::sml_log_handler_cb that is automatically set at
* startup, the log level is set to ::SML_LOG_LEVEL_ALL and all messages will be
Expand All @@ -83,7 +83,7 @@ void sml_log_set_log_handler(enum sml_log_level levels, sml_log_handler_cb cb, v
void sml_log_print(enum sml_log_level level, const char *format, ...);

/**
* @brief Syntatic sugar to ::sml_log_print(SML_LOG_LEVEL_DEBUG, "debug message")
* @brief Syntactic sugar to ::sml_log_print(SML_LOG_LEVEL_DEBUG, "debug message")
*
* @param ... A formatted message
*
Expand All @@ -92,7 +92,7 @@ void sml_log_print(enum sml_log_level level, const char *format, ...);
#define sml_debug(...) sml_log_print(SML_LOG_LEVEL_DEBUG, __VA_ARGS__)

/**
* @brief Syntatic sugar to ::sml_log_print(SML_LOG_LEVEL_INFO, "info message")
* @brief Syntactic sugar to ::sml_log_print(SML_LOG_LEVEL_INFO, "info message")
*
* @param ... A formatted message
*
Expand All @@ -101,7 +101,7 @@ void sml_log_print(enum sml_log_level level, const char *format, ...);
#define sml_info(...) sml_log_print(SML_LOG_LEVEL_INFO, __VA_ARGS__)

/**
* @brief Syntatic sugar to ::sml_log_print(SML_LOG_LEVEL_WARNING, "warning message")
* @brief Syntactic sugar to ::sml_log_print(SML_LOG_LEVEL_WARNING, "warning message")
*
* @param ... A formatted message
*
Expand All @@ -110,7 +110,7 @@ void sml_log_print(enum sml_log_level level, const char *format, ...);
#define sml_warning(...) sml_log_print(SML_LOG_LEVEL_WARNING, __VA_ARGS__)

/**
* @brief Syntatic sugar to ::sml_log_print(SML_LOG_LEVEL_ERROR, "error message")
* @brief Syntactic sugar to ::sml_log_print(SML_LOG_LEVEL_ERROR, "error message")
*
* @param ... A formatted message
*
Expand All @@ -119,7 +119,7 @@ void sml_log_print(enum sml_log_level level, const char *format, ...);
#define sml_error(...) sml_log_print(SML_LOG_LEVEL_ERROR, __VA_ARGS__)

/**
* @brief Syntatic sugar to ::sml_log_print(SML_LOG_LEVEL_CRITICAL, "critical message")
* @brief Syntactic sugar to ::sml_log_print(SML_LOG_LEVEL_CRITICAL, "critical message")
*
* @param ... A formatted message
*
Expand Down

0 comments on commit d03c8bb

Please sign in to comment.